Diminished emotion recognition with reduced face gaze in complex situation in individuals with broad autism phenotype

Woo-Jin Cha (a, 1), Kiho Kim (b, *)

a Department of Psychology, Chung-Ang University, 84, Heukseok-ro, Dongjak-gu, Seoul 06974, Republic of Korea
b Department of Psychology of Counseling, Sejong Cyber University, Cheonho-daero 680, Gwangjin-gu, Seoul 04992, Republic of Korea
* Corresponding author. Email: kihohere@sjcu.ac.kr
1 Present address: Department of Neuropsychiatry, Seoul National University Hospital, Seoul 03080, Republic of Korea.

Int J Clin Health Psychol. 2023;23:100399. doi: 10.1016/j.ijchp.2023.100399. Received 27 March 2023; accepted 11 July 2023; published 1 October 2023. © 2023 The Authors.

Abstract

Background/objective: Individuals with the broad autism phenotype (BAP) show a diminished ability to recognize emotion. This study examined whether that decline can be identified more clearly as task complexity increases, and whether it is influenced by eye-gaze patterns.

Method: Forty-one individuals with BAP and 40 healthy controls performed two types of emotion recognition task. After identifying the conditions under which the BAP group performed worse than the control group, we compared the groups' gaze proportions on faces and context in those conditions.

Results: The more difficult the task, the clearer the relationship between the level of autistic traits and emotion recognition ability. The BAP group was less accurate than the control group when a face with mild emotional intensity was presented with context. In terms of gaze proportion, the BAP group looked less at faces than the control group when recognizing emotions.

Conclusion: These findings indicate that the diminished emotion recognition ability of individuals with BAP may be influenced by face gaze.

Keywords: Broad autism phenotype; Autism spectrum disorder; Emotion recognition; Weak central coherence theory; Eye-tracking
Introduction

The broad autism phenotype (BAP) is defined as a range of traits qualitatively similar to autism spectrum disorder (ASD) but with a mild level of symptoms. While individuals with BAP experience higher clinical distress than the general population, they do not show clinically severe symptoms that cause social and occupational impairment. They do, however, experience significantly higher levels of depression and anxiety than the general population, and these symptoms may stem from their reduced social interaction and social skills (Kulasinghe et al., 2021). Previous studies have focused on deficits in the social interaction of the BAP group. The relationship between such deficits and BAP level has been reported to be mediated by social cognition, particularly facial emotion recognition (Sasson et al., 2013), and the BAP group recognizes facial expressions less well than the general population (Stewart et al., 2020). Because people reveal their emotions and intentions primarily through their faces, recognizing emotions from others' faces is critical in social interaction (Costa-Cordella, 2021).

In individuals with BAP, diminished emotion recognition could arise from their distinctive cognitive patterns. In everyday life, facial expressions rarely appear outside complex environments, and people recognize emotions correctly by considering faces and contexts simultaneously (Le et al., 2022). Individuals with BAP may find this simultaneous processing difficult, as described by weak central coherence theory (WCCT). The WCCT holds that individuals with ASD show cognitive patterns that prioritize local, detailed parts, in contrast to the general population's tendency to integrate multiple components (Happe & Frith, 2006). The cognitive pattern described by the WCCT has also been found in the BAP group (Nayar et al., 2022). Moreover, during social interaction, individuals in the BAP and ASD groups tend to look mainly at hands or other areas instead of faces, which reduces their effectiveness at detecting a target's emotion (Rigby et al., 2016).

While previous studies have reported that the BAP group shows a diminished ability to recognize other people's emotions, the deficit does not appear severe enough to directly cause clinical impairment.
Most studies suggest that the BAP group has a diminished ability to recognize emotions, but some have reported no difference between the BAP group and the general population (Pisula & Ziegart-Sadowska, 2015). Taken together, these mixed results suggest that the BAP group's diminished emotion recognition emerges under complex conditions. For example, when asked to match the emotion of a facial stimulus with a presented context, or to judge faces with ambiguous emotions (e.g., low emotional intensity), BAP groups showed significantly diminished recognition (Cha & Lee, 2022; Stewart et al., 2020). However, when asked to identify the emotion of a face presented alone, or to match basic emotions such as happiness, they performed similarly to the general population (Holt et al., 2014; Soto-Icaza et al., 2022). In short, the more difficult the measure of emotion recognition, the clearer the decrease in the emotion recognition ability of the BAP group.

This study first aims to investigate whether the diminished emotion recognition of the BAP group can be identified more clearly as the task becomes more difficult, by examining the conditions under which the BAP group shows reduced recognition of facial expressions. To diversify the conditions, we divide two types of emotion recognition task into three conditions and use four emotions (fear, anger, sadness, happiness) and two emotional intensities (full-blown, mild). Second, this study aims to identify whether diminished emotion recognition is influenced by the group's distinctive cognitive patterns as expressed in eye-gaze behavior. We therefore use an eye tracker to measure gaze duration on the face during the tasks in which the BAP group shows a decline in emotion recognition. We hypothesize that (1) the BAP group will show a diminished ability to recognize emotions in faces compared with the general population as the task becomes more difficult, and (2) the BAP group will look less at faces when recognizing emotions in the tasks where they show diminished ability.

Method

Participants

We recruited 512 participants from several universities in Seoul, Korea, through advertisements in online communities and on internet bulletin boards. To screen for individuals with BAP, participants completed the Korean versions of the Broad Autism Phenotype Questionnaire (BAPQ) and the Autism-Spectrum Quotient (AQ) (Kim & Kim, 2022; Ko et al., 2018).
Participants scoring 3.15 or higher on the BAPQ and 23 or higher on the AQ were classified into the BAP group; individuals with low scores on both measures were preferentially selected as the control group. Exclusion criteria were (1) a diagnosis of another psychiatric disorder and (2) participation in any pharmacological treatment. Finally, 81 participants were sorted into two groups: (a) the BAP group (n = 41) and (b) the control group (n = 40).

Questionnaires and measurement

Broad autism phenotype questionnaire (BAPQ)

The BAPQ is a 36-item self-report scale developed to assess autistic traits (Hurley et al., 2007; Kim & Kim, 2022). It provides quantitative information on the core symptom domains of ASD: aloofness, pragmatic language, and rigidity. The total score is the average of all 36 items, and each domain score is the average of its 12 items. A higher BAPQ score corresponds to a higher level of autistic traits. Cronbach's α was 0.89 in the validation study and 0.94 in the present study.

Autism-spectrum quotient (AQ)

The AQ is a 50-item self-report questionnaire developed to assess five domains of autistic traits: social skills, communication, imagination, attention to detail, and attention switching (Baron-Cohen et al., 2001; Ko et al., 2018). In the original scale, scores above 32 indicate clinically significant levels of autistic traits; in the Korean version, scores above 23 serve as an effective cut-off for classifying individuals with BAP. Cronbach's α was 0.85 in the validation study and 0.88 in the present study.
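To make the screening rule concrete, the sketch below computes BAPQ and AQ totals and applies the cut-offs described above (BAPQ ≥ 3.15 and AQ ≥ 23). It is illustrative only: the data frame, column names, and item scoring are hypothetical, and the study's own analyses were run in R, not Python.

```python
import numpy as np
import pandas as pd

# Hypothetical data: one row per participant, with reverse-coding already
# applied; bapq_1..bapq_36 are Likert ratings, aq_1..aq_50 are 0/1 scored.
def screen_bap(df: pd.DataFrame) -> pd.DataFrame:
    bapq_items = [f"bapq_{i}" for i in range(1, 37)]
    aq_items = [f"aq_{i}" for i in range(1, 51)]
    scores = pd.DataFrame(index=df.index)
    scores["bapq_total"] = df[bapq_items].mean(axis=1)  # BAPQ total = item average
    scores["aq_total"] = df[aq_items].sum(axis=1)       # AQ total = item sum
    # Cut-offs from the Method section: BAPQ >= 3.15 and AQ >= 23 -> BAP group
    scores["group"] = np.where(
        (scores["bapq_total"] >= 3.15) & (scores["aq_total"] >= 23),
        "BAP", "control",
    )
    return scores
```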
Beck depression inventory-second edition (BDI-II)

The BDI-II is a 21-item measure of the severity of cognitive and physical symptoms of depression in the 2 weeks prior to testing (Beck et al., 1996; Lee et al., 2017). A higher BDI-II score indicates a higher level of depression. Cronbach's α was 0.89 in the validation study and 0.91 in the present study.

Beck anxiety inventory (BAI)

The BAI is a 21-item measure of physical and cognitive symptoms of anxiety in the week prior to testing (Beck et al., 1988; Yook & Kim, 1997). A higher BAI score indicates a higher level of anxiety. Cronbach's α was 0.91 in the validation study and 0.88 in the present study.

Behavioral task

Phase 1: emotion recognition task

The emotion recognition task (ERT) comprised eight colored facial stimuli from the "Extended ChaeLee Korean facial expressions of emotions: ChaeLee-E" (Lee et al., 2013), classified by four emotions (fear, anger, sadness, happiness) and two emotional intensities (full-blown, mild). Mild-intensity stimuli were created by morphing the neutral and full-blown expressions with computer software (Sqirlz Morph: www.xiberpix.com), generating 51 images ranging from neutral to full-blown emotion in 2% increments (Supplementary Fig. 1A); the image at the 50% intensity level was defined as mild. Facial stimuli (4.7 × 5.7 cm) were placed at the center of a white 25.4 × 16.9 cm background (Supplementary Fig. 1B). The question "What is the emotion of the face in the image?" was presented for 5 s before each facial stimulus, and participants were required to respond as quickly and accurately as possible by typing 1, 3, 5, or 7 on the keyboard to select fear, anger, sadness, or happiness, respectively. After a response, the task moved to the next stimulus; if no response was made within 3 s, the trial was scored as incorrect and the task moved on. Trials were separated by a 2-s interval. The task comprised four blocks of eight trials each. Accuracy was the percentage of correct answers across all trials, and reaction time was the period from stimulus onset to the participant's response.
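The intensity continuum was produced with Sqirlz Morph. As a rough illustration of the 2%-step idea, the sketch below cross-fades two aligned photographs with Pillow; the file names are hypothetical, and a true morph also warps facial geometry rather than only blending pixel values, so this is an approximation, not the authors' procedure.

```python
from PIL import Image  # Pillow

# Hypothetical files; both images must have the same size and mode,
# and should be spatially aligned for the blend to look like a morph.
neutral = Image.open("neutral.jpg").convert("RGB")
full_blown = Image.open("full_blown.jpg").convert("RGB")

# 51 images from 0% (neutral) to 100% (full-blown) in 2% steps;
# the study defined the 50% image (step 25) as the mild stimulus.
for step in range(51):
    alpha = step * 0.02
    Image.blend(neutral, full_blown, alpha).save(f"morph_{step * 2:03d}.png")
```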
Phase 2: emotions in context task

The emotions in context task (ECT) comprised 32 colored facial stimuli embedded in emotional contexts (Sasson et al., 2016), classified by four emotions (fear, anger, sadness, happiness), two emotional intensities (full-blown, mild), and four emotional contexts (fear, anger, sadness, happiness). Context pictures were taken from "CanStockPhoto" (https://www.canstockphoto.co.kr), and the face regions in the original pictures were replaced with the eight facial stimuli used in Phase 1 (four emotions × two intensities). To validate the types and intensities of the emotional context pictures, 15 graduate students rated the relevance, arousal, and valence of the emotions on a 7-point Likert scale (0 = not at all; 6 = extremely). A professional editor was commissioned to integrate each of the eight facial stimuli naturally into the four emotional contexts. Facial stimuli (4.7 × 5.7 cm) were presented within emotional contexts of 25.4 × 16.9 cm (Supplementary Fig. 1C). The ECT had two congruence conditions: in the congruent condition the emotion of the facial stimulus matched the emotion of the context (e.g., a happy face in a happy context), and in the incongruent condition they mismatched (e.g., a happy face in an angry context). Participants were asked to respond as quickly and accurately as possible to the emotion of the facial stimulus alone, disregarding the context, typing 1, 3, 5, or 7 to select fear, anger, sadness, or happiness. If no response was made within 3 s, the trial was scored as incorrect and the task moved on. Trials were separated by a 2-s interval. The task comprised two blocks of 32 trials each. Accuracy and reaction time were computed as in Phase 1, separately for the congruent and incongruent conditions.
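As a minimal sketch of this scoring, assuming a hypothetical trial log with one row per trial, per-condition accuracy and mean reaction time could be computed as follows:

```python
import pandas as pd

# Hypothetical trial log: columns subject, condition ("congruent" /
# "incongruent"), intensity, emotion, correct (bool; 3-s time-outs are
# already coded as incorrect), and rt (s from stimulus onset; NaN if
# the participant did not respond).
trials = pd.read_csv("ect_trials.csv")

per_condition = (
    trials.groupby(["subject", "condition", "intensity"])
          .agg(accuracy=("correct", lambda c: 100.0 * c.mean()),  # % correct
               mean_rt=("rt", "mean"))                            # mean RT of responses
          .reset_index()
)
print(per_condition.head())
```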
Apparatus

Detailed information on the apparatus is presented in the supplementary material.

Procedure

This study adhered to the principles of the Declaration of Helsinki. All participants took part voluntarily and signed a consent form approved by the Institutional Review Board of Chung-Ang University (No. 1041078-202108-HRSB-273-01). Detailed information on the procedure is presented in the supplementary material.

Data analysis

The required sample size was calculated using G*Power 3.1.9.7 (Faul et al., 2007), which estimated that 54 participants would be adequate for a mixed-design analysis of variance (ANOVA) with an alpha error probability of 0.05, a power of 0.95, and a medium effect size (ηp² = 0.25).

We performed an independent t-test and a chi-square test to analyze differences in group characteristics between the BAP and control groups, and Pearson correlations to investigate the relationship between task performance (reaction time and accuracy) and the level of autistic traits (BAPQ and AQ). To examine group differences in emotion recognition, we conducted a 2 (group: BAP, control) × 2 (emotional intensity: full-blown, mild) × 4 (target emotion: fear, anger, sadness, happiness) mixed-design analysis of covariance (ANCOVA) for each task, with accuracy or reaction time as the dependent variable and BDI-II and BAI scores as covariates. When the ANCOVA showed a significant main effect of the four-level target emotion factor, Tukey's HSD test was used for post-hoc comparisons. ECT results were analyzed separately for the congruent and incongruent conditions.

After confirming the BAP group's diminished recognition of facial emotions, we compared the gaze proportions of regions of interest (ROIs) between groups to infer the cause of the deficit. The gaze proportion of the face, eyes, mouth, or context was the dependent variable, with BDI-II and BAI scores as covariates. The gaze proportion of an ROI was calculated as the percentage of gaze duration on that ROI out of the total gaze duration. Context ROIs were defined as the parts of the background carrying emotional valence (Supplementary Fig. 2). Group differences in the gaze proportions of the face, eyes, mouth, and context were examined under the conditions in which the BAP group showed decreased emotion recognition. All analyses were performed using R Statistical Software (v4.1.2; R Core Team, 2021).
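For illustration, the sketch below computes the ROI gaze proportion defined above and fits a random-intercept mixed model as a rough stand-in for the reported group × congruence ANCOVA with BDI-II and BAI as covariates. The paper's analyses were run in R; this Python version uses hypothetical file and column names, and a linear mixed model approximates, rather than reproduces, the mixed-design ANCOVA.

```python
import pandas as pd
import statsmodels.formula.api as smf

# Hypothetical fixation log: one row per fixation with columns subject,
# condition, roi ("face"/"eyes"/"mouth"/"context"/"other"), and
# duration (ms), restricted to mild-intensity ECT trials.
fix = pd.read_csv("fixations.csv")

total = fix.groupby(["subject", "condition"])["duration"].sum()
on_face = (fix[fix["roi"] == "face"]
           .groupby(["subject", "condition"])["duration"].sum()
           .reindex(total.index, fill_value=0))  # subjects with no face fixations
gaze = (100.0 * on_face / total).rename("face_pct").reset_index()

# Hypothetical per-subject table with columns subject, group, bdi, bai.
data = gaze.merge(pd.read_csv("subjects.csv"), on="subject")
model = smf.mixedlm("face_pct ~ group * condition + bdi + bai",
                    data, groups=data["subject"]).fit()
print(model.summary())
```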
Nine participants were removed during a quality check because their error rates deviated from the average by more than 2 SD. Ultimately, 37 participants in the BAP group and 35 in the control group entered the final analysis.

Results

Group characteristics

Table 1 shows the demographic and clinical characteristics of the participants, based on the t-test and chi-square test.

We found significant group differences in the mean BAPQ and total AQ scores, confirming that the groups were properly divided according to their autistic traits. No significant differences were found in age or sex. Significant differences in BDI-II and BAI were also found; because these variables could affect the dependent variables (reaction time and accuracy), BDI-II and BAI scores were entered as covariates in each analysis.

Correlation analysis

We conducted a Pearson correlation analysis to examine the relationship between the level of autistic traits and task performance (Fig. 1; Supplementary Table 1).

Fig. 1. Correlation heatmap between the ability to recognize emotions and autistic traits. P1: Phase 1 (emotion recognition task); P2: Phase 2 (emotions in context task); con: congruent condition; incon: incongruent condition. *p < .05. **p < .01.

No significant correlation between the level of autistic traits and reaction time or accuracy was found in Phase 1.

In the congruent condition of Phase 2, some subscales of autistic traits correlated positively with reaction time: the higher the level of autistic traits, the slower the emotion recognition. No significant correlation with accuracy was found in this condition.

In the incongruent condition of Phase 2, some subscales of autistic traits correlated positively with reaction time, and some correlated negatively with accuracy.
Thus, the higher the level of autistic traits, the slower and the less accurate the emotion recognition.

In summary, the level of autistic traits correlated most strongly with emotion recognition ability in the most difficult task, in which the emotion of the facial stimulus did not match the emotion of the context.

Comparing performance between groups in each task

Phase 1: emotion recognition task

To examine the reaction time and accuracy of emotion recognition, we performed a 2 × 2 × 4 mixed-design ANCOVA on each measure (Supplementary Table 2).

For reaction time, there was a significant main effect of intensity: mild-intensity stimuli were recognized significantly more slowly than full-blown stimuli. A significant main effect of emotion was also identified: the happy emotion was recognized significantly faster than the sad emotion (Supplementary Table 3).

For accuracy, a significant intensity × emotion interaction was found. Post-hoc tests showed that full-blown stimuli were recognized more accurately than mild stimuli for the happy, sad, and fearful emotions, but not for the angry emotion (Supplementary Table 4). A significant main effect of emotion was also identified, although post-hoc tests revealed no significant pairwise differences between emotions (Supplementary Table 3).

In summary, no group difference in emotion recognition was found in Phase 1 (ERT): in a simple task in which facial stimuli are presented alone, the BAP group recognizes emotions as accurately and as quickly as the general population.

Phase 2: emotions in context task (congruent condition)

To examine the reaction time and accuracy of emotion recognition, we performed a 2 × 2 × 4 mixed-design ANCOVA on each measure (Supplementary Table 2).

For reaction time, there was a significant main effect of intensity: mild-intensity stimuli were recognized more slowly than full-blown stimuli. A significant main effect of emotion was also found.
Happy emotions were recognized faster than all other emotions, and angry emotions were recognized faster than sad emotions (Supplementary Table 3).

For accuracy, there was a significant group × intensity interaction. To probe this interaction in terms of our study aims, we analyzed the main effect of group at each intensity. There was no group difference for full-blown intensity [F(1, 68) = 0.067, p = .797], but a significant group difference for mild intensity [F(1, 68) = 6.230, p = .015, ηp² = 0.084] (Fig. 2; Table 2). That is, while the BAP group recognized high-intensity emotions as accurately as the control group, it was less accurate when facial expressions were presented at mild intensity. We also found a significant intensity × emotion interaction: full-blown stimuli were recognized more accurately than mild stimuli for the happy, sad, and fearful emotions, but not for the angry emotion (Supplementary Table 4). A significant main effect of intensity indicated that mild-intensity emotions were recognized less accurately than full-blown emotions. A significant main effect of emotion was also found, but post-hoc tests revealed no significant pairwise differences (Supplementary Table 3).

In summary, in the congruent condition of Phase 2 (ECT), the BAP group recognized mild-intensity emotions less accurately than the control group.

Phase 2: emotions in context task (incongruent condition)

To examine the reaction time and accuracy of emotion recognition, we performed a 2 × 2 × 4 mixed-design ANCOVA on each measure (Supplementary Table 2).

For reaction time, the main effect of emotion was significant.
Fearful emotions were recognized more slowly than all other emotions, and sad emotions were recognized more slowly than happy emotions (Supplementary Table 3).

For accuracy, there was a significant group × intensity interaction. Analyzing the main effect of group at each intensity, we found no group difference for faces with full-blown intensity [F(1, 68) = 0.380, p = .539] but a significant group difference for faces with mild intensity [F(1, 68) = 5.009, p = .028, ηp² = 0.069] (Fig. 2; Table 2). In other words, the BAP group recognized full-blown emotions as accurately as the control group but was less accurate for mild-intensity emotions. The intensity × emotion interaction was also significant: full-blown emotions were recognized more accurately than mild emotions for the happy, sad, and fearful emotions, but not for the angry emotion (Supplementary Table 4). The main effect of intensity was significant: mild-intensity stimuli were recognized less accurately than full-blown stimuli. The main effect of emotion was also significant, with the fearful emotion recognized less accurately than the other emotions (Supplementary Table 3).

In summary, in the incongruent condition of Phase 2 (ECT), the BAP group recognized mild-intensity emotions less accurately than the control group.

Therefore, in a complex task in which facial stimuli with mild emotional intensity were presented with context, the BAP group was less accurate at recognizing the emotions.

Phase 2: emotions in context task (congruent and incongruent conditions)

The preceding analyses confirmed that the BAP group was less accurate than the control group at recognizing mild-intensity emotions in both the congruent and incongruent conditions of Phase 2.
To investigate whether these results varied with congruence, we performed a 2 (group: BAP, control) × 2 (congruence: congruent, incongruent) mixed-design ANCOVA on accuracy in Phase 2.

There was no group × congruence interaction [F(1, 68) = 0.002, p = .965], but there were significant main effects of group [F(1, 68) = 8.195, p = .006, ηp² = 0.076] and congruence [F(1, 68) = 5.617, p = .021, ηp² = 0.026]. These results indicate that the BAP group recognized facial stimuli with mild emotional intensity less accurately than the control group regardless of the emotional congruence between target and context, and that both groups recognized targets better in congruent than in incongruent contexts.

Investigating group differences in gaze proportion

The BAP group was less accurate than the control group at recognizing mild-intensity emotions in both the congruent and incongruent conditions of the ECT. To infer the cause of this discrepancy, we examined group differences in the gaze proportions of the face, eyes, mouth, and context when mild-intensity emotions were presented in the ECT (Table 3).

In the congruent condition of the ECT, there was a significant group difference in the gaze proportion of the face, but no group differences for the eyes, mouth, or context (Fig. 3).

In the incongruent condition of the ECT, no group differences were found in the gaze proportions of the face, eyes, mouth, or context (Fig. 3).

Since accuracy differed significantly between the BAP and control groups in both the congruent and incongruent conditions, we combined the conditions and conducted a 2 (group: BAP, control) × 2 (congruence: congruent, incongruent) mixed-design ANCOVA on the gaze proportions of the face, eyes, mouth, and context (Supplementary Table 5). There was no group × congruence interaction and no main effect of congruence.
However, there was a significant main effect of group for the face, indicating that the BAP group looked at the face for a shorter time than the control group regardless of the emotional congruence between target and context.

Discussion

This study examined whether individuals with BAP show a diminished ability to recognize emotions compared with the general population as task difficulty increases, and whether this reduced ability is related to gaze patterns on the face. Both hypotheses were supported. First, the more difficult the task, the clearer the relationship between the level of autistic traits and emotion recognition ability; in particular, the BAP group was less accurate at recognizing emotions of low intensity in complex situations where faces and contexts were presented together. Second, the reduced emotion recognition of the BAP group was accompanied by a decreased gaze proportion on the face.

We divided the tasks into three types by difficulty, examined the correlation between the level of autistic traits and emotion recognition ability for each type, and compared recognition between the BAP and control groups in each type. These analyses supported the first hypothesis and can reconcile why some studies found the BAP group's emotion recognition to be similar to the general population's (Soto-Icaza et al., 2022) while others found it to be lower (Stewart et al., 2020). Although individuals with BAP have mild autistic characteristics that do not entail clinical impairment, the BAP group was confirmed to differ from the general population, and varying task difficulty made this difference clearly visible: the BAP group showed normal emotion recognition in the easy task but diminished recognition in the difficult task. People usually reveal their emotions together with various external cues, including body gestures and voices, and rarely express them in a full-blown manner (Mondada, 2016). Social interaction comprises many emotional elements, and successful interaction is possible only when these are fully understood. The BAP group's difficulty engaging adequately in social interaction may stem from an inability to parse the complex, emotion-laden aspects of such interaction. As this inability can contribute to depressive and anxiety disorders, our results suggest that the BAP group should be considered a priority group in need of support and an extension of ASD.

Notably, this study corroborated that the diminished emotion recognition of the BAP group can be accompanied by a diminished gaze proportion on the face.
We confirmed that the BAP group looked less at the face than the control group in the congruent condition of Phase 2, and that there was a main effect of group in the group × congruence ANCOVA on the gaze proportion of the face. These results raise the possibility that a decrease in face gaze is associated with a decrease in emotion recognition accuracy. People express their emotions primarily through their faces, so attending to faces is an effective way to recognize others' emotions in social interaction (Costa-Cordella, 2021; Garcia-Garcia et al., 2022; Mondada, 2016). Moreover, effective emotion perception requires selecting the relevant part of the face in a complex social environment, sustaining attention, and extracting information about another's emotional state (Morrison et al., 1988). Indeed, gaze duration on faces has been reported to be associated with facial emotion recognition (Stanley et al., 2013). Previous eye-tracking studies have reported that individuals with ASD look at faces for shorter times than the general population (Kaliukhovich et al., 2020; Rigby et al., 2016), and that shorter face gaze is associated with poorer emotion recognition and deficits in social interaction (Murias et al., 2018; Sasson et al., 2016). In our results, the BAP group likewise looked at the face for a shorter time than the general population during emotion recognition; this tendency may contribute to their diminished emotion recognition ability.

However, there was no significant group difference in face gaze proportion in the incongruent condition of Phase 2 (ECT), even though the BAP group showed lower recognition accuracy there. This result may reflect the fact that emotion recognition is influenced not only by gaze patterns but also by the process of interpreting emotions. People process faces through two systems (Haxby et al., 2000): the core system, the first stage of face processing, handles the visual analysis of faces, while the extended system, the second stage, processes information from the face such as emotion or identity. Face gaze relates to the core system, whereas recognizing facial emotions relates to the extended system. While face gaze is critical for the core-system aspect of face processing, interpreting the emotion of the gazed-at face is an equally important extended-system factor. Our results indicate that the BAP group's weaker interpretation of faces within the extended system, relative to the general population, could also lower their emotion recognition accuracy.

This study did not find a significant group difference in gaze proportion on the context.
In previous studies, individuals with BAP as well as ASD gazed less at faces and more at the surrounding context than the general population (Kaliukhovich et al., 2020; Nayar et al., 2022; Rigby et al., 2016; Shic et al., 2011). The absence of a group difference in context gaze here may be related to the task instructions. Previous studies reporting higher context gaze in autism groups used free-viewing tasks, in which all participants viewed the social images freely. In contrast, this study used emotion recognition tasks that instructed participants to respond as quickly and accurately as possible to the emotion of the centrally presented face. Both the BAP and control groups would therefore have been looking at the face and trying to judge its emotion as quickly as possible rather than attending to the context. Future research should investigate whether the BAP group shows different patterns of gazing at faces and contexts depending on task type (free viewing versus emotion recognition).

In this study, both the BAP and control groups recognized the happy emotion faster than other emotions, and recognized the fearful emotion more slowly and less accurately. Thus, although the BAP group showed reduced emotion recognition relative to the control group, the pattern of differences across emotion types was similar in the two groups. Previous studies reporting diminished emotion recognition in individuals with BAP likewise found that the BAP group, like controls, recognized happy faces faster than sad and fearful faces, and another study found no group × emotion interaction on recognition accuracy (Cha & Lee, 2022; Sasson et al., 2016). Moreover, work in the general population has shown that people recognize happy faces faster and more accurately than other emotions, while recognizing fearful emotions more slowly and less accurately (Wells et al., 2016). Taken together, these findings suggest that the BAP group is not specifically impaired for particular emotions but rather shows an overall impairment in emotion recognition, with differences across emotions that resemble the pattern seen in the general population.

This study had several limitations. First, while it suggested that the decreased emotion recognition in the BAP group might result from a decreased gaze proportion on the face, it did not examine the direct relationship between the two variables by treating face gaze proportion as an independent variable. Follow-up studies should verify, through direct comparison, that reduced face gaze produces diminished emotion recognition in individuals with BAP.
Second, although the levels of anxiety and depression were statistically controlled in the analyses, anxiety and depression may have induced an attention bias toward certain emotions, especially negative ones (Bergman et al., 2021). Future studies should also match depression and anxiety levels when recruiting BAP and control groups. Third, this study selected individuals with high and low BAPQ/AQ scores. However, because individuals with BAP fall between the ASD group and the general population at various points on the spectrum, a dimensional approach may characterize the BAP population more effectively than a high/low categorization; future research should attempt this with a larger sample. Fourth, this study found no significant group differences in gaze proportion on the eyes and mouth, contrary to previous ASD findings (Setien-Ramos et al., 2022; Riddiford et al., 2022). As some studies have reported that the BAP group looks less at the eyes and longer at the mouth than the general population (Merin et al., 2007), follow-up studies are needed to clarify the relationship between BAP and gaze patterns on the eyes and mouth. Fifth, unlike for the other emotions, the BAP and control groups showed no significant difference between mild and full-blown intensity when recognizing the angry emotion; the current literature offers no clear explanation for this result, and further research is needed.

This study highlights the decline in the emotion recognition ability of the BAP group when context is presented and facial stimuli have mild intensity, and confirms that this decline may be influenced by gaze patterns. Previous studies reported that individuals with BAP experience depression and anxiety owing to difficulties in social interaction; the present study provides evidence for why such difficulties arise. Moreover, the results suggest that interventions giving the BAP group feedback to look at faces during emotion recognition could be effective.
This study highlights the decline in emotion recognition ability in the BAP by presenting context and by using facial stimuli with mild emotional intensity. Additionally, it confirmed that this decline could be influenced by gaze patterns. Previous studies reported that individuals with BAP experience depression and anxiety owing to difficulties in social interaction; the present findings help explain why individuals with BAP experience such difficulties. Moreover, these results suggest that an intervention giving the BAP group feedback to look at faces during emotion recognition would be effective: real-time eye-gaze feedback could modify eye movements and improve individuals' ability to recognize emotions by increasing their gaze proportion on faces.

Funding

This work was supported by the Ministry of Education of the Republic of Korea and the National Research Foundation of Korea (2019R1I1A1A01062451).
"main.pdf" "tienePdf" => true "fechaRecibido" => "2023-03-27" "fechaAceptado" => "2023-07-11" "PalabrasClave" => array:1 [ "en" => array:1 [ 0 => array:4 [ "clase" => "keyword" "titulo" => "Keywords" "identificador" => "xpalclavsec1703785" "palabras" => array:5 [ 0 => "Broad autism phenotype" 1 => "Autism spectrum disorder" 2 => "Emotion recognition" 3 => "Weak central coherence theory" 4 => "Eye-tracking" ] ] ] ] "tieneResumen" => true "resumen" => array:1 [ "en" => array:3 [ "titulo" => "Abstract" "resumen" => "<span id="abss0001" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleSectionTitle" id="cesectitle0002">Background/objective</span><p id="spara010" class="elsevierStyleSimplePara elsevierViewall">Individuals with broad autism phenotype (BAP) showed a diminished ability to recognize emotion. This study aims to examine whether their decline in emotion recognition ability could be more clearly identified as task complexity increased and whether their decline could be influenced by their eye-gaze patterns.</p></span> <span id="abss0002" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleSectionTitle" id="cesectitle0003">Method</span><p id="spara011" class="elsevierStyleSimplePara elsevierViewall">41 individuals with BAP and 40 healthy controls performed two types of emotion recognition tasks. After confirming conditions wherein the BAP group did not perform well compared to the control group, we compared gaze proportion on faces and context between groups when performing the conditions.</p></span> <span id="abss0003" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleSectionTitle" id="cesectitle0004">Results</span><p id="spara012" class="elsevierStyleSimplePara elsevierViewall">The more difficult the task, the clearer the significant relationships between the level of autistic traits and emotion recognition ability. The BAP group showed lower accuracy compared to the control group when a face with mild emotional intensity was presented with context. In terms of gaze proportion, the BAP group looked less at faces when recognizing emotions compared to the control group.</p></span> <span id="abss0004" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleSectionTitle" id="cesectitle0005">Conclusion</span><p id="spara013" class="elsevierStyleSimplePara elsevierViewall">These findings indicate that diminished emotion recognition ability in individuals with BAP may be influenced by face gaze.</p></span>" "secciones" => array:4 [ 0 => array:2 [ "identificador" => "abss0001" "titulo" => "Background/objective" ] 1 => array:2 [ "identificador" => "abss0002" "titulo" => "Method" ] 2 => array:2 [ "identificador" => "abss0003" "titulo" => "Results" ] 3 => array:2 [ "identificador" => "abss0004" "titulo" => "Conclusion" ] ] ] ] "NotaPie" => array:1 [ 0 => array:3 [ "etiqueta" => "1" "nota" => "<p class="elsevierStyleNotepara" id="notep0001">Present address: Department of Neuropsychiatry, Seoul National University Hospital, Seoul 03080, Republic of Korea.</p>" "identificador" => "fn1" ] ] "apendice" => array:1 [ 0 => array:1 [ "seccion" => array:1 [ 0 => array:4 [ "apendice" => "<p id="para0059a" class="elsevierStylePara elsevierViewall"><elsevierMultimedia ident="ecom0001"></elsevierMultimedia></p>" "etiqueta" => "Appendix" "titulo" => "Supplementary materials" "identificador" => "sec0028" ] ] ] ] "multimedia" => array:7 [ 0 => array:8 [ "identificador" => "fig0001" "etiqueta" => "Fig. 
1" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr1.jpeg" "Alto" => 1809 "Ancho" => 2667 "Tamanyo" => 230391 ] ] "detalles" => array:1 [ 0 => array:3 [ "identificador" => "alt0001" "detalle" => "Fig " "rol" => "short" ] ] "descripcion" => array:1 [ "en" => "<p id="spara001" class="elsevierStyleSimplePara elsevierViewall">Correlation heatmap between ability to recognize emotions and autistic traits. P1: Phase 1 (emotion recognition task); P2: Phase 2 (emotions in context task); con: congruent condition; incon: incongruent condition. *<span class="elsevierStyleItalic">p</span> < .05. **<span class="elsevierStyleItalic">p</span> < .01.</p>" ] ] 1 => array:8 [ "identificador" => "fig0002" "etiqueta" => "Fig. 2" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr2.jpeg" "Alto" => 1239 "Ancho" => 2667 "Tamanyo" => 140741 ] ] "detalles" => array:1 [ 0 => array:3 [ "identificador" => "alt0002" "detalle" => "Fig " "rol" => "short" ] ] "descripcion" => array:1 [ "en" => "<p id="spara002" class="elsevierStyleSimplePara elsevierViewall">Comparison of accuracy between groups under the condition of intensity of emotion in congruent and incongruent conditions of ECT. BAP, broad autism phenotype group; Control, control group. *<span class="elsevierStyleItalic">p</span> < .05. **<span class="elsevierStyleItalic">p</span> < .01. ***<span class="elsevierStyleItalic">p</span> < .001.</p>" ] ] 2 => array:8 [ "identificador" => "fig0003" "etiqueta" => "Fig. 3" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr3.jpeg" "Alto" => 2239 "Ancho" => 2667 "Tamanyo" => 292274 ] ] "detalles" => array:1 [ 0 => array:3 [ "identificador" => "alt0003" "detalle" => "Fig " "rol" => "short" ] ] "descripcion" => array:1 [ "en" => "<p id="spara003" class="elsevierStyleSimplePara elsevierViewall">Comparison of gaze proportion between groups under the condition of mild emotional intensity in congruent and incongruent conditions of ECT. BAP, broad autism phenotype group; Control, control group. **<span class="elsevierStyleItalic">p</span> < .01.</p>" ] ] 3 => array:8 [ "identificador" => "tbl0001" "etiqueta" => "Table 1" "tipo" => "MULTIMEDIATABLA" "mostrarFloat" => true "mostrarDisplay" => false "detalles" => array:1 [ 0 => array:3 [ "identificador" => "alt0004" "detalle" => "Table " "rol" => "short" ] ] "tabla" => array:2 [ "leyenda" => "<p id="spara005" class="elsevierStyleSimplePara elsevierViewall">Note. 
Mean (standard deviation); BAP: Broad Autism Phenotype Group, BAPQ: Broad Autism Phenotype Questionnaire; AQ: Autism Spectrum Quotient; BDI-II: Beck Depression Inventory-II; BAI: Beck Anxiety Inventory.</p>" "tablatextoimagen" => array:1 [ 0 => array:2 [ "tabla" => array:1 [ 0 => """ <table border="0" frame="\n \t\t\t\t\tvoid\n \t\t\t\t" class=""><thead title="thead"><tr title="table-row"><a name="en0001"></a><th class="td-with-role" title="\n \t\t\t\t\ttable-head\n \t\t\t\t ; entry_with_role_rowgroup colgroup " rowspan="2" align="left" valign="top" scope="col" colspan="2" style="border-bottom: 2px solid black">Measure</th><a name="en0002"></a><th class="td-with-role" title="\n \t\t\t\t\ttable-head\n \t\t\t\t ; entry_with_role_rowgroup " rowspan="2" align="left" valign="top" scope="col" style="border-bottom: 2px solid black">BAP (<span class="elsevierStyleItalic">n</span> = 37)</th><a name="en0003"></a><th class="td-with-role" title="\n \t\t\t\t\ttable-head\n \t\t\t\t ; entry_with_role_rowgroup " rowspan="2" align="left" valign="top" scope="col" style="border-bottom: 2px solid black">Control (<span class="elsevierStyleItalic">n</span> = 35)</th><a name="en0004"></a><th class="td-with-role" title="\n \t\t\t\t\ttable-head\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top" scope="col">Test Statistics</th></tr><tr title="table-row"><a name="en0008"></a><th class="td" title="\n \t\t\t\t\ttable-head\n \t\t\t\t " align="" valign="top" scope="col" style="border-bottom: 2px solid black"><span class="elsevierStyleItalic">t</span> / <span class="elsevierStyleItalic">χ2</span> \t\t\t\t\t\t\n \t\t\t\t\t\t</th><a name="en0009"></a><th class="td" title="\n \t\t\t\t\ttable-head\n \t\t\t\t " align="" valign="top" scope="col" style="border-bottom: 2px solid black"><span class="elsevierStyleItalic">p</span> \t\t\t\t\t\t\n \t\t\t\t\t\t</th></tr></thead><tbody title="tbody"><tr title="table-row"><a name="en0010"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top">Age (years)</td><a name="en0011"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">22.189 (2.885) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0012"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">22.629 (2.224) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0013"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">−0.726 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0014"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.470 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0015"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top">Sex (male/female)</td><a name="en0016"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">12 / 25 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0017"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">15 / 20 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0018"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.448 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0019"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.503 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0020"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; 
entry_with_role_colgroup " colspan="2" align="left" valign="top">BAPQ</td><a name="en0021"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">3.678 (0.320) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0022"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.451 (0.363) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0023"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">15.179 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0024"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0025"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0026"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Aloof \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0027"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">3.829 (0.492) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0028"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.451 (0.480) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0029"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">11.944 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0030"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0031"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0032"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Pragmatic language \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0033"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">3.370 (0.414) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0034"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.343 (0.470) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0035"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">9.757 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0036"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0037"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0038"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Rigid \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0039"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">3.718 (0.534) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0040"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.590 (0.436) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0041"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">9.759 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0042"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0043"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top">AQ</td><a name="en0044"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">30.405 (4.885) 
\t\t\t\t\t\t\n \t\t\t\t</td><a name="en0045"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">14.143 (3.353) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0046"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">16.546 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0047"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0048"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0049"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Social skill \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0050"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">7.194 (1.600) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0051"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.257 (1.379) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0052"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">13.937 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0053"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0054"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0055"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Communication \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0056"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">5.444 (1.715) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0057"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.857 (0.944) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0058"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">14.017 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0059"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0060"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0061"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Imagination \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0062"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">4.472 (2.035) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0063"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">1.971 (1.403) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0064"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">6.042 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0065"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0066"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0067"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Attention for detail \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0068"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">5.167 (2.490) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0069"></a><td class="td" 
title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">4.486 (2.293) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0070"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">1.1992 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0071"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.235 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0072"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top"> \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0073"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">Attention Switching \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0074"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">7.667 (1.454) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0075"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">4.571 (1.703) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0076"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">8.227 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0077"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0078"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top">BDI-II</td><a name="en0079"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">16.162 (9.060) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0080"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">9.000 (7.276) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0081"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">3.708 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0082"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">< 0.001 \t\t\t\t\t\t\n \t\t\t\t</td></tr><tr title="table-row"><a name="en0083"></a><td class="td-with-role" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t ; entry_with_role_colgroup " colspan="2" align="left" valign="top">BAI</td><a name="en0084"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">10.027 (6.300) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0085"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">6.600 (6.713) \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0086"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">2.231 \t\t\t\t\t\t\n \t\t\t\t</td><a name="en0087"></a><td class="td" title="\n \t\t\t\t\ttable-entry\n \t\t\t\t " align="" valign="top">0.029 \t\t\t\t\t\t\n \t\t\t\t</td></tr></tbody></table> """ ] "imagenFichero" => array:1 [ 0 => "xTab3298096.png" ] ] ] ] "descripcion" => array:1 [ "en" => "<p id="spara004" class="elsevierStyleSimplePara elsevierViewall">Demographic and clinical characteristic for each group.</p>" ] ] 4 => array:8 [ "identificador" => "tbl0002" "etiqueta" => "Table 2" "tipo" => "MULTIMEDIATABLA" "mostrarFloat" => true "mostrarDisplay" => false "detalles" => array:1 [ 0 => array:3 [ "identificador" => "alt0005" "detalle" => "Table " "rol" => "short" ] ] "tabla" => array:2 [ "leyenda" => "<p id="spara007" class="elsevierStyleSimplePara elsevierViewall">Note. 
Table 2. Reaction time and accuracy under the condition where facial stimuli were presented with mild emotional intensity.

Dependent variable | BAP (n = 37)  | Control (n = 35) | F     | p      | ηp²
Phase 2: Congruent condition
Reaction time      | 1.520 (0.467) | 1.381 (0.434)    | 0.213 | 0.646  | 0.001
Accuracy           | 0.814 (0.310) | 0.911 (0.210)    | 6.230 | 0.015* | 0.084
Phase 2: Incongruent condition
Reaction time      | 1.579 (0.440) | 1.407 (0.364)    | 0.651 | 0.423  | 0.011
Accuracy           | 0.693 (0.317) | 0.801 (0.254)    | 5.009 | 0.028* | 0.069

Note. BAP: Broad Autism Phenotype Group; * statistically significant value.
Table 3. Gaze proportions under the condition where facial stimuli were presented with mild emotional intensity.

Dependent variable         | BAP (n = 37)  | Control (n = 35) | F      | p      | ηp²
Phase 2: Congruent condition
Gaze proportion of face    | 0.786 (0.163) | 0.887 (0.045)    | 8.532  | 0.005* | 0.146
Gaze proportion of eyes    | 0.255 (0.208) | 0.325 (0.224)    | 1.795  | 0.186  | 0.035
Gaze proportion of mouth   | 0.203 (0.162) | 0.221 (0.234)    | 0.109  | 0.743  | 0.002
Gaze proportion of context | 0.049 (0.043) | 0.037 (0.029)    | 2.161  | 0.148  | 0.041
Phase 2: Incongruent condition
Gaze proportion of face    | 0.796 (0.124) | 0.791 (0.157)    | 0.670  | 0.416  | 0.011
Gaze proportion of eyes    | 0.269 (0.203) | 0.255 (0.212)    | <0.001 | 0.996  | <0.001
Gaze proportion of mouth   | 0.214 (0.180) | 0.216 (0.237)    | 0.058  | 0.811  | <0.001
Gaze proportion of context | 0.053 (0.035) | 0.035 (0.018)    | 2.400  | 0.128  | 0.046

Note. BAP: Broad Autism Phenotype Group; * statistically significant value.
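For reference, gaze proportions like those in Table 3 are shares of total fixation time falling inside areas of interest (AOIs). The sketch below illustrates this computation under assumed rectangular AOIs and simple fixation records; the AOI geometry and data are hypothetical, since the study's actual AOI definitions are not reproduced here.

```python
# Minimal sketch: gaze proportion = fixation time inside an AOI divided
# by total fixation time. AOI rectangles and fixations are hypothetical.
from dataclasses import dataclass

@dataclass
class Fixation:
    x: float         # gaze position in screen pixels
    y: float
    duration: float  # fixation duration in ms

# Hypothetical AOI bounding boxes: (x_min, y_min, x_max, y_max).
# Eyes and mouth lie inside the face AOI; context is the scene background.
AOIS = {
    "face":    (760, 300, 1160, 800),
    "eyes":    (820, 380, 1100, 480),
    "mouth":   (860, 640, 1060, 740),
    "context": (0, 0, 700, 1080),
}

def gaze_proportions(fixations):
    """Share of total fixation time spent in each AOI.

    Proportions need not sum to 1: gaze can fall outside every AOI,
    and the eyes/mouth AOIs are nested inside the face AOI.
    """
    total = sum(f.duration for f in fixations)
    if total == 0:
        return {name: 0.0 for name in AOIS}
    return {
        name: sum(f.duration for f in fixations
                  if x0 <= f.x <= x1 and y0 <= f.y <= y1) / total
        for name, (x0, y0, x1, y1) in AOIS.items()
    }

demo = [Fixation(900, 420, 250), Fixation(950, 680, 180), Fixation(200, 900, 120)]
print(gaze_proportions(demo))
```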
Appendix. Supplementary materials (mmc1.docx).

References

Baron-Cohen, S., Wheelwright, S., Skinner, R., Martin, J., & Clubley, E. (2001). The autism-spectrum quotient (AQ): Evidence from Asperger syndrome/high-functioning autism, males and females, scientists and mathematicians. Journal of Autism and Developmental Disorders, 31(1), 5–17. https://doi.org/10.1023/A:1005653411471

Beck, A. T., Epstein, N., Brown, G., & Steer, R. A. (1988). An inventory for measuring clinical anxiety: Psychometric properties. Journal of Consulting and Clinical Psychology, 56(6), 893–897. https://doi.org/10.1037/0022-006X.56.6.893

Beck, A. T., Steer, R. A., Ball, R., & Ranieri, W. F. (1996). Comparison of Beck Depression Inventories-IA and -II in psychiatric outpatients. Journal of Personality Assessment, 67(3), 588–597. https://doi.org/10.1207/s15327752jpa6703_13

Bergman, M. A., Vrijsen, J. N., Rinck, M., van Oostrom, I., Kan, C. C., Collard, R. M., et al. (2021). Is a negative attentional bias in individuals with autism spectrum disorder explained by comorbid depression? An eye-tracking study. Journal of Autism and Developmental Disorders, 51, 4213–4226. https://doi.org/10.1007/s10803-021-04880-6

Cha, W. J., & Lee, J. H. (2022). Diminished ability to integrate target stimuli with context during emotional recognition in individuals with broad autism phenotype. Frontiers in Psychology, 13, 934385. https://doi.org/10.3389/fpsyg.2022.934385

Costa-Cordella, S., Arevalo-Romero, C., Parada, F. J., & Rossi, A. (2021). Social support and cognition: A systematic review. Frontiers in Psychology, 12. https://doi.org/10.3389/fpsyg.2021.637060

Faul, F., Erdfelder, E., Lang, A. G., & Buchner, A. (2007). G*Power 3: A flexible statistical power analysis program for the social, behavioral, and biomedical sciences. Behavior Research Methods, 39, 175–191. https://doi.org/10.3758/bf03193146

Garcia-Garcia, J. M., Penichet, V. M., Lozano, M. D., & Fernando, A. (2022). Using emotion recognition technologies to teach children with autism spectrum disorder how to identify and express emotions. Universal Access in the Information Society, 21(4), 809–825. https://doi.org/10.1007/s10209-021-00818-y

Happé, F., & Frith, U. (2006). The weak coherence account: Detail-focused cognitive style in autism spectrum disorders. Journal of Autism and Developmental Disorders, 36(1), 5–25. https://doi.org/10.1007/s10803-005-0039-0

Haxby, J. V., Hoffman, E. A., & Gobbini, M. I. (2000). The distributed human neural system for face perception. Trends in Cognitive Sciences, 4(6), 223–233. https://doi.org/10.1016/S1364-6613(00)01482-0

Holt, R. J., Chura, L. R., Lai, M. C., Suckling, J., Von Dem Hagen, E., Calder, A. J., et al. (2014). 'Reading the mind in the eyes': An fMRI study of adolescents with autism and their siblings. Psychological Medicine, 44(15), 3215–3227. https://doi.org/10.1017/S0033291714000233

Hurley, R. S., Losh, M., Parlier, M., Reznick, J. S., & Piven, J. (2007). The broad autism phenotype questionnaire. Journal of Autism and Developmental Disorders, 37(9), 1679–1690. https://doi.org/10.1007/s10803-006-0299-3

Kaliukhovich, D. A., Manyakov, N. V., Bangerter, A., Ness, S., Skalkin, A., Goodwin, M. S., et al. (2020). Social attention to activities in children and adults with autism spectrum disorder: Effects of context and age. Molecular Autism, 11, 1–14. https://doi.org/10.1186/s13229-019-0309-x

Kim, H. J., & Kim, K. (2022). Psychometric properties of the Korean version of the broad autism phenotype questionnaire. Social Behavior and Personality: An International Journal, 50(2), 1–13. https://doi.org/10.2224/sbp.11153

Ko, H. Y., Lee, W. H., Won, E. K., Ban, J. J., Jung, D. E., & Kim, Y. (2018). The reliability and validity of the Korean version of the autism-spectrum quotient. Psychiatry Investigation, 15(8), 783–789. https://doi.org/10.30773/pi.2018.02.18.1

Kulasinghe, K., Whittingham, K., & Mitchell, A. E. (2021). Mental health, broad autism phenotype and psychological inflexibility in mothers of young children with autism spectrum disorder in Australia: A cross-sectional survey. Autism: The International Journal of Research and Practice, 25(5), 1187–1202. https://doi.org/10.1177/1362361320984625

Le, N., Nguyen, K., Nguyen, A., & Le, B. (2022). Global-local attention for emotion recognition. Neural Computing and Applications, 34(24), 21625–21639. https://doi.org/10.1007/s00521-021-06778-x

Lee, E. H., Lee, S. J., Hwang, S. T., Hong, S. H., & Kim, J. H. (2017). Reliability and validity of the Beck Depression Inventory-II among Korean adolescents. Psychiatry Investigation, 14(1), 30–36. https://doi.org/10.4306/pi.2017.14.1.30

Lee, K. U., Kim, J., Yeon, B., Kim, S. H., & Chae, J. H. (2013). Development and standardization of extended ChaeLee Korean facial expressions of emotions. Psychiatry Investigation, 10(2), 155–163. https://doi.org/10.4306/pi.2013.10.2.155

Merin, N., Young, G. S., Ozonoff, S., & Rogers, S. J. (2007). Visual fixation patterns during reciprocal social interaction distinguish a subgroup of 6-month-old infants at-risk for autism from comparison infants. Journal of Autism and Developmental Disorders, 37(1), 108–121. https://doi.org/10.1007/s10803-006-0342-4

Mondada, L. (2016). Challenges of multimodality: Language and the body in social interaction. Journal of Sociolinguistics, 20(3), 336–366.

Morrison, R. L., Bellack, A. S., & Mueser, K. T. (1988). Deficits in facial-affect recognition and schizophrenia. Schizophrenia Bulletin, 14(1), 67–83. https://doi.org/10.1093/schbul/14.1.67

Murias, M., Major, S., Davlantis, K., Franz, L., Harris, A., Rardin, B., et al. (2018). Validation of eye-tracking measures of social attention as a potential biomarker for autism clinical trials. Autism Research, 11(1), 166–174. https://doi.org/10.1002/aur.1894

Nayar, K., Shic, F., Winston, M., & Losh, M. (2022). A constellation of eye-tracking measures reveals social attention differences in ASD and the broad autism phenotype. Molecular Autism, 13(1), 18. https://doi.org/10.1186/s13229-022-00490-w

Pisula, E., & Ziegart-Sadowska, K. (2015). Broader autism phenotype in siblings of children with ASD—a review. International Journal of Molecular Sciences, 16(6), 13217–13258. https://doi.org/10.3390/ijms160613217

Riddiford, J. A., Enticott, P. G., Lavale, A., & Gurvich, C. (2022). Gaze and social functioning associations in autism spectrum disorder: A systematic review and meta-analysis. Autism Research, 15(8), 1380–1446. https://doi.org/10.1002/aur.2729

Rigby, S. N., Stoesz, B. M., & Jakobson, L. S. (2016). Gaze patterns during scene processing in typical adults and adults with autism spectrum disorders. Research in Autism Spectrum Disorders, 25, 24–36.

Sasson, N. J., Nowlin, R. B., & Pinkham, A. E. (2013). Social cognition, social skill, and the broad autism phenotype. Autism: The International Journal of Research and Practice, 17(6), 655–667. https://doi.org/10.1177/1362361312455704

Sasson, N. J., Pinkham, A. E., Weittenhiller, L. P., Faso, D. J., & Simpson, C. (2016). Context effects on facial affect recognition in schizophrenia and autism: Behavioral and eye-tracking evidence. Schizophrenia Bulletin, 42(3), 675–683. https://doi.org/10.1093/schbul/sbv176

Setien-Ramos, I., Lugo-Marín, J., Gisbert-Gustemps, L., Díez-Villoria, E., Magán-Maganto, M., Canal-Bedia, R., et al. (2022). Eye-tracking studies in adults with autism spectrum disorder: A systematic review and meta-analysis. Journal of Autism and Developmental Disorders, 1–14. https://doi.org/10.1007/s10803-022-05524-z

Shic, F., Bradshaw, J., Klin, A., Scassellati, B., & Chawarska, K. (2011). Limited activity monitoring in toddlers with autism spectrum disorder. Brain Research, 1380, 246–254. https://doi.org/10.1016/j.brainres.2010.11.074

Soto-Icaza, P., Beffara-Bret, B., Vargas, L., Aboitiz, F., & Billeke, P. (2022). Differences in cortical processing of facial emotions in broader autism phenotype. PloS One, 17(1). https://doi.org/10.1371/journal.pone.0262004

Stanley, J. T., Zhang, X., Fung, H. H., & D.M. … (2013). Cultural differences in gaze and emotion recognition: Americans contrast more than Chinese. …
Isaacowitz" ] ] ] ] ] "host" => array:1 [ 0 => array:2 [ "doi" => "10.1037/a0029209" "Revista" => array:6 [ "tituloSerie" => "Emotion (Washington, D.C.)" "fecha" => "2013" "volumen" => "13" "numero" => "1" "paginaInicial" => "36" "link" => array:1 [ 0 => array:2 [ "url" => "https://www.ncbi.nlm.nih.gov/pubmed/22889414" "web" => "Medline" ] ] ] ] ] ] ] ] 33 => array:3 [ "identificador" => "bib0037" "etiqueta" => "Stewart et al., 2020" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Theory of mind performance in younger and older adults with elevated autistic traits" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:4 [ 0 => "G.R. Stewart" 1 => "G.L. Wallace" 2 => "M. Cottam" 3 => "R.A. Charlton" ] ] ] ] ] "host" => array:1 [ 0 => array:2 [ "doi" => "10.1002/aur.2206" "Revista" => array:7 [ "tituloSerie" => "Autism Research" "fecha" => "2020" "volumen" => "13" "numero" => "5" "paginaInicial" => "751" "paginaFinal" => "762" "link" => array:1 [ 0 => array:2 [ "url" => "https://www.ncbi.nlm.nih.gov/pubmed/31520519" "web" => "Medline" ] ] ] ] ] ] ] ] 34 => array:3 [ "identificador" => "bib0038" "etiqueta" => "Wells et al., 2016" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Identification of emotional facial expressions: Effects of expression, intensity, and sex on eye gaze" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:3 [ 0 => "L.J. Wells" 1 => "S.M. Gillespie" 2 => "P. Rotshtein" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:4 [ "tituloSerie" => "PloS One" "fecha" => "2016" "volumen" => "11" "numero" => "12" ] ] ] ] ] ] 35 => array:3 [ "identificador" => "bib0039" "etiqueta" => "Yook and Kim, 1997" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "A clinical study on the Korean version of Beck anxiety inventory: Comparative study of patient and non-patient" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:2 [ 0 => "S.P. Yook" 1 => "Z.S. Kim" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:6 [ "tituloSerie" => "Korean Journal of Clinical Psychology" "fecha" => "1997" "volumen" => "16" "numero" => "1" "paginaInicial" => "185" "paginaFinal" => "197" ] ] ] ] ] ] ] ] ] ] ] "idiomaDefecto" => "en" "url" => "/16972600/0000002300000004/v1_202310051056/S1697260023000352/v1_202310051056/en/main.assets" "Apartado" => array:4 [ "identificador" => "26683" "tipo" => "SECCION" "en" => array:2 [ "titulo" => "Original articles" "idiomaDefecto" => true ] "idiomaDefecto" => "en" ] "PDF" => "https://static.elsevier.es/multimedia/16972600/0000002300000004/v1_202310051056/S1697260023000352/v1_202310051056/en/main.pdf?idApp=UINPBA00004N&text.app=https://www.elsevier.es/" "EPUB" => "https://multimedia.elsevier.es/PublicationsMultimediaV1/item/epub/S1697260023000352?idApp=UINPBA00004N" ]
Year/Month | Html | Pdf | Total |
---|---|---|---|
2024 October | 24 | 9 | 33 |
2024 September | 36 | 14 | 50 |
2024 August | 26 | 16 | 42 |
2024 July | 22 | 6 | 28 |
2024 June | 21 | 10 | 31 |
2024 May | 36 | 7 | 43 |
2024 April | 26 | 9 | 35 |
2024 March | 45 | 11 | 56 |
2024 February | 16 | 8 | 24 |
2024 January | 35 | 9 | 44 |
2023 December | 21 | 3 | 24 |
2023 November | 39 | 10 | 49 |
2023 October | 76 | 14 | 90 |
2023 September | 25 | 7 | 32 |
2023 August | 65 | 23 | 88 |
2023 July | 11 | 10 | 21 |