<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="Generator" content="Microsoft Word 15 (filtered medium)">
<style><!--
/* Font Definitions */
@font-face
        {font-family:"Cambria Math";
        panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
        {font-family:Calibri;
        panose-1:2 15 5 2 2 2 4 3 2 4;}
@font-face
        {font-family:"Calibri Light";
        panose-1:2 15 3 2 2 2 4 3 2 4;}
@font-face
        {font-family:"Roboto Light";
        panose-1:2 0 0 0 0 0 0 0 0 0;}
@font-face
        {font-family:"Times New Roman \(Body CS\)";
        panose-1:2 2 6 3 5 4 5 2 3 4;}
/* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal
        {margin:0cm;
        margin-bottom:.0001pt;
        font-size:10.0pt;
        font-family:"Calibri",sans-serif;}
h2
        {mso-style-priority:9;
        mso-style-link:"Heading 2 Char";
        mso-margin-top-alt:auto;
        margin-right:0cm;
        mso-margin-bottom-alt:auto;
        margin-left:0cm;
        font-size:18.0pt;
        font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink
        {mso-style-priority:99;
        color:blue;
        text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
        {mso-style-priority:99;
        color:purple;
        text-decoration:underline;}
p.msonormal0, li.msonormal0, div.msonormal0
        {mso-style-name:msonormal;
        mso-margin-top-alt:auto;
        margin-right:0cm;
        mso-margin-bottom-alt:auto;
        margin-left:0cm;
        font-size:11.0pt;
        font-family:"Calibri",sans-serif;}
span.Heading2Char
        {mso-style-name:"Heading 2 Char";
        mso-style-priority:9;
        mso-style-link:"Heading 2";
        font-family:"Calibri Light",sans-serif;
        color:#2F5496;}
span.EmailStyle19
        {mso-style-type:personal;
        font-family:"Calibri",sans-serif;
        color:windowtext;}
span.EmailStyle20
        {mso-style-type:personal-reply;
        font-family:"Roboto Light";
        color:windowtext;}
.MsoChpDefault
        {mso-style-type:export-only;
        font-size:10.0pt;}
@page WordSection1
        {size:612.0pt 792.0pt;
        margin:72.0pt 72.0pt 72.0pt 72.0pt;}
div.WordSection1
        {page:WordSection1;}
/* List Definitions */
@list l0
        {mso-list-id:319120384;
        mso-list-template-ids:957391606;}
@list l0:level1
        {mso-level-tab-stop:36.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level2
        {mso-level-tab-stop:72.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level3
        {mso-level-tab-stop:108.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level4
        {mso-level-tab-stop:144.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level5
        {mso-level-tab-stop:180.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level6
        {mso-level-tab-stop:216.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level7
        {mso-level-tab-stop:252.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level8
        {mso-level-tab-stop:288.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l0:level9
        {mso-level-tab-stop:324.0pt;
        mso-level-number-position:left;
        text-indent:-18.0pt;}
@list l1
        {mso-list-id:1264731142;
        mso-list-template-ids:-1518595722;}
ol
        {margin-bottom:0cm;}
ul
        {margin-bottom:0cm;}
--></style>
</head>
<body lang="EN-CA" link="blue" vlink="purple">
<div class="WordSection1">
<p class="MsoNormal"><span style="font-size:10.5pt;font-family:"Roboto Light"">FYI - a post-doc opportunity from Dr. Jeremy Cooperstock.<o:p></o:p></span></p>
<p class="MsoNormal"><span style="font-size:10.5pt;font-family:"Roboto Light""><o:p> </o:p></span></p>
<div>
<p class="MsoNormal" style="background:white"><b><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">Diana Le</span></b><span style="color:black"><o:p></o:p></span></p>
<p class="MsoNormal" style="background:white"><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">Project Administrator</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#0070C0;border:none windowtext 1.0pt;padding:0cm">,
</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">NeuroHub</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#0070C0;border:none windowtext 1.0pt;padding:0cm"><o:p></o:p></span></p>
<p class="MsoNormal"><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">Montr</span><span style="font-family:"Arial",sans-serif;color:#595959;background:white">é</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">al
 Neurological Institute, Faculty of Medicine and Health Sciences</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#C00000;border:none windowtext 1.0pt;padding:0cm"> |</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#0070C0;border:none windowtext 1.0pt;padding:0cm"> </span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">McGill
 University</span><span style="font-size:11.0pt"><o:p></o:p></span></p>
<p class="MsoNormal"><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">1010 Sherbrooke, Suite 1800, Montr</span><span style="font-family:"Arial",sans-serif;color:#595959;background:white">é</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">al
 (Qu</span><span style="font-family:"Arial",sans-serif;color:#595959;background:white">é</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">bec) H3A 2R7</span><span style="font-size:11.0pt"><o:p></o:p></span></p>
<p class="MsoNormal"><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">https://neurohub.ca
</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#C00000;border:none windowtext 1.0pt;padding:0cm;background:white">|
</span><span lang="FR-CA" style="font-family:"Arial",sans-serif;color:#595959;border:none windowtext 1.0pt;padding:0cm">support@neurohub.ca<o:p></o:p></span></p>
<p class="MsoNormal"><span style="color:black"><o:p> </o:p></span></p>
</div>
<p class="MsoNormal"><span style="font-size:11.0pt"> </span><o:p></o:p></p>
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm">
<p class="MsoNormal" style="margin-bottom:12.0pt"><b><span style="font-size:12.0pt;color:black">From:
</span></b><span style="font-size:12.0pt;color:black">Jeremy Cooperstock <jer@cim.mcgill.ca><br>
<b>Date: </b>Monday, July 24, 2023 at 4:57 PM</span><o:p></o:p></p>
</div>
<h2 style="background:white"><span style="font-family:"Arial",sans-serif;color:black">Post-doctoral fellows or research associates in Computational Neuroscience, AI and Acoustics</span><o:p></o:p></h2>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps:
      normal;orphans: 2;widows:
      2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<i><span style="font-size:12.0pt;font-family:"Arial",sans-serif;color:black">Join our "bionic ears" project and help us tackle the cocktail party problem! We are recruiting research associates or post-doctoral fellows who will conduct research on improving
 intelligibility of speech in noisy environments using cognitive load measurements obtained from EEG indicators. We expect this approach to result in improved sound quality from hearing aids, which are presently unable to distinguish between important sounds
 to enhance and distracting noise to suppress.</span></i><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps:
      normal;orphans: 2;widows:
      2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<i><span style="font-size:12.0pt;font-family:"Arial",sans-serif;color:black"> </span></i><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">The cocktail party problem poses a challenge to hearing aids, in that devices cannot distinguish between important sounds to enhance and distracting noise to suppress, and thus, they enhance all sounds
 and noise equally. We aim to solve this problem by developing a miniaturized system to allow head-worn devices to enhance a user's desired sounds and improve perceived speech when the user encounters several sounds from the environment, without manual intervention
 from the user. To do so, the system must understand the level of mental exertion a user is employing to listen to the desired sound, or their cognitive load, which we posit can be done purely from brain signals. Then, in an attempt to lower the cognitive load,
 the audio parameters can be automatically tuned to ease listening effort and improve speech intelligibility. The results of this project will benefit anyone who wants to improve speech intelligibility in noisy environments, but in particular, will be of value
 for the hard-of-hearing population, who need the support of hearing assistance devices.</span><o:p></o:p></p>
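<p class="MsoNormal" style="background:white"><span style="font-family:"Arial",sans-serif;color:black">As a rough illustration of the cognitive-load half of this idea (not the project's actual pipeline), the sketch below computes a simple load index from EEG band power; the channel count, sampling rate, and the theta/alpha-ratio heuristic are assumptions made only for this example.</span><o:p></o:p></p>
<pre style="background:white;font-family:Consolas,monospace;font-size:9.0pt;color:black">
# Illustrative sketch only: estimate a crude "cognitive load" index from EEG
# band power. The sampling rate, channel layout, and the theta/alpha ratio as
# a load marker are assumptions for this example, not the project's method.
import numpy as np
from scipy.signal import welch

FS = 250  # assumed sampling rate (Hz)

def band_power(x, fs, lo, hi):
    """Average power of signal x in the [lo, hi] Hz band, via a Welch PSD."""
    freqs, psd = welch(x, fs=fs, nperseg=fs * 2)
    mask = (freqs >= lo) &amp; (freqs &lt;= hi)
    return psd[mask].mean()

def cognitive_load_index(eeg_window):
    """eeg_window: array of shape (n_channels, n_samples) from ear sensors.
    Returns a unitless index; higher theta power relative to alpha is often
    associated with greater listening effort (illustrative heuristic)."""
    theta = np.mean([band_power(ch, FS, 4.0, 8.0) for ch in eeg_window])
    alpha = np.mean([band_power(ch, FS, 8.0, 12.0) for ch in eeg_window])
    return theta / (alpha + 1e-12)

# Example with synthetic data: 4 ear-EEG channels, 2 seconds
rng = np.random.default_rng(0)
window = rng.standard_normal((4, FS * 2))
print(f"load index: {cognitive_load_index(window):.3f}")
</pre>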
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">The project involves a fusion of technologies, including brain and bio-signal analysis, advanced acoustic processing, artificial intelligence, and embedded system and hardware design. The system first
 decodes the cognitive load of the user from brain signals, such as EEG, and bio-signals, simply from unobtrusive sensors placed in and around the ears of a user, as would be expected for a hearing assistance device. Then, a complementary acoustic technology
 is employed that tunes the audio parameters to improve speech intelligibility, and delivers the resulting sounds, favoring the target sound as enhanced audio over noise. This will be done using objective and subjective analyses techniques based on signal to
 noise ratio (SNR) performance, delay, user satisfaction, and other means. From this research, we want to understand how the two elements could be combined, where the EEG markers used to indicate the level of cognitive overload will be used to tune the rendered
 sounds in order to increase speech intelligibility in a real-time, closed-loop system.</span><o:p></o:p></p>
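<p class="MsoNormal" style="background:white"><span style="font-family:"Arial",sans-serif;color:black">A minimal sketch of how such a closed loop might be wired together is shown below; the placeholder load estimator, the single noise-suppression parameter, its update rule, and the SNR check are stand-ins for the actual decoding, acoustic processing, and evaluation described above.</span><o:p></o:p></p>
<pre style="background:white;font-family:Consolas,monospace;font-size:9.0pt;color:black">
# Minimal closed-loop sketch (illustrative only): an EEG-derived load estimate
# drives one audio parameter, and SNR is tracked as an objective metric.
# The load estimator, the update rule, and the enhancement model are
# placeholders, not the project's actual algorithms.
import numpy as np

def estimate_load(eeg_window):
    # Placeholder: stands in for a learned EEG/bio-signal decoder.
    return float(np.clip(np.abs(eeg_window).mean(), 0.0, 1.0))

def enhance(target, noise, suppression):
    # Placeholder enhancement: scale the noise component by (1 - suppression).
    return target + (1.0 - suppression) * noise

def snr_db(target, residual_noise):
    # Objective metric: ratio of target energy to residual-noise energy, in dB.
    return 10.0 * np.log10(np.sum(target**2) / (np.sum(residual_noise**2) + 1e-12))

def closed_loop_step(eeg_window, target, noise, suppression, load_setpoint=0.5, k=0.1):
    """One control-loop iteration: raise noise suppression when the estimated
    cognitive load exceeds the setpoint, relax it otherwise."""
    load = estimate_load(eeg_window)
    suppression = float(np.clip(suppression + k * (load - load_setpoint), 0.0, 1.0))
    out = enhance(target, noise, suppression)
    return out, suppression, snr_db(target, (1.0 - suppression) * noise)

# Toy run with synthetic signals
rng = np.random.default_rng(1)
target = np.sin(2 * np.pi * 200 * np.arange(4000) / 16000)
noise = 0.5 * rng.standard_normal(4000)
suppression = 0.2
for _ in range(3):
    eeg = 0.8 * rng.standard_normal((4, 500))  # pretend ear-EEG window
    _, suppression, snr = closed_loop_step(eeg, target, noise, suppression)
    print(f"suppression={suppression:.2f}  SNR={snr:.1f} dB")
</pre>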
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">We are recruiting:</span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<b><span style="font-family:"Arial",sans-serif;color:black">1. Computational Neuroscientist</span></b><span style="font-family:"Arial",sans-serif;color:black">: primary activities involve collecting participant data for electroencephalography (EEG) studies,
 developing artificial intelligence models to process and analyze these data.</span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<b><span style="font-family:"Arial",sans-serif;color:black">2. AI and Acoustics researcher</span></b><span style="font-family:"Arial",sans-serif;color:black">: primary activities involve working with ML/deep learning models to carry out processing of acoustic
 time series data, signal analysis and interpretation of EEG data.</span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">Candidates in both positions will be expected to collaborate and participate actively in research dissemination, including the preparation of research publications. A travel budget is available to support
 presentation of your work in top-tier venues. This project is being conducted with industry partner <a href="https://aavaa.com/"><span style="color:#2A22D2;text-decoration:none">AAVAA</span></a>, and is supported by an NSERC Alliance and MEDTEQ Partenar-IA
 grant.</span><o:p></o:p></p>
<h2 style="background:white;font-variant-ligatures:
      normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width:
      0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px" id="apply">
<span style="font-family:"Arial",sans-serif;color:black">How to Apply</span><o:p></o:p></h2>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">To apply for any of these positions, please email <a href="mailto:srl-jobs@cim.mcgill.ca"><span style="color:#2A22D2;text-decoration:none">Jeremy Cooperstock</span></a>, preferably including::</span><o:p></o:p></p>
<ol start="1" type="1">
<li class="MsoNormal" style="color:black;mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;mso-list:l0 level1 lfo3;background:white">
<span style="font-family:"Arial",sans-serif">A brief letter of application, describing your qualifications and relevant experience to the position of interest, along with your dates of availability.</span><o:p></o:p></li><li class="MsoNormal" style="color:black;mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;mso-list:l0 level1 lfo3;background:white">
<span style="font-family:"Arial",sans-serif">Detailed CV with links to online papers and/or project portfolios.</span><o:p></o:p></li><li class="MsoNormal" style="color:black;mso-margin-top-alt:auto;mso-margin-bottom-alt:auto;mso-list:l0 level1 lfo3;background:white">
<span style="font-family:"Arial",sans-serif">Three (3) reference letters (sent separately).</span><o:p></o:p></li></ol>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">The positions are available immediately, with a reasonably flexible start date. Informal inquiries are welcome.</span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<b><span style="font-family:"Arial",sans-serif;color:black">About us</span></b><span style="font-family:"Arial",sans-serif;color:black">: The <a href="http://srl.mcgill.ca/"><span style="color:#2A22D2;text-decoration:none">Shared Reality Lab</span></a> conducts
 research in audio, video, and haptic technologies, building systems that leverage their capabilities to facilitate and enrich both human-computer and computer-mediated human-human interaction. The lab is part of the <a href="http://cim.mcgill.ca/"><span style="color:#2A22D2;text-decoration:none">Centre
 for Intelligent Machines</span></a> and <a href="https://www.mcgill.ca/ece/"><span style="color:#2A22D2;text-decoration:none">Department of Electrical and Computer Engineering</span></a> of <a href="https://www.mcgill.ca/"><span style="color:#2A22D2;text-decoration:none">McGill
 University</span></a>. McGill, one of Canada's most prestigious universities, is located in Montreal, a <a href="https://montrealgazette.com/news/local-news/montreal-named-worlds-best-student-city"><span style="color:#2A22D2;text-decoration:none">top city
 to live in</span></a>, especially for students.</span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black"> </span><o:p></o:p></p>
<p class="MsoNormal" style="background:white;font-variant-ligatures: normal;font-variant-caps: normal;orphans: 2;widows: 2;-webkit-text-stroke-width: 0px;text-decoration-thickness: initial;text-decoration-style:
      initial;text-decoration-color: initial;word-spacing:0px">
<span style="font-family:"Arial",sans-serif;color:black">McGill University is committed to equity in employment and diversity. It welcomes applications from women, Aboriginal persons, persons with disabilities, ethnic minorities, persons of minority sexual
 orientation or gender identity, visible minorities, and others who may contribute to further diversification. In Quebec, "Postdoctoral Fellow" is a regulated category of trainee. Notably, a postdoctoral candidate must be within five years of graduating with
 a Ph.D. For more information, please consult <a href="https://www.mcgill.ca/gps/postdocs/fellows"><span style="color:#2A22D2;text-decoration:none">www.mcgill.ca/gps/postdocs/fellows</span></a>.</span><o:p></o:p></p>
<p class="MsoNormal" style="margin-bottom:12.0pt"><span style="font-size:11.0pt"> </span><o:p></o:p></p>
<p class="MsoNormal"><o:p> </o:p></p>
</div>
</body>
</html>