jalauer commited on
Commit
2e77130
·
verified ·
1 Parent(s): a578591

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +28 -0
  2. LICENSE.txt +208 -0
  3. README.md +75 -0
  4. data/14000_1_QUINN_3AOB.mat +3 -0
  5. data/18367_1_QUINN_3AOB.mat +3 -0
  6. data/19759_1_QUINN_3AOB.mat +3 -0
  7. data/20543_1_QUINN_3AOB.mat +3 -0
  8. data/3001_1_3AOB.mat +3 -0
  9. data/3001_2_3AOB.mat +3 -0
  10. data/3001_3_3AOB.mat +3 -0
  11. data/3002_1_3AOB.mat +3 -0
  12. data/3002_2_3AOB.mat +3 -0
  13. data/3002_3_3AOB.mat +3 -0
  14. data/3003_1_3AOB.mat +3 -0
  15. data/3003_2_3AOB.mat +3 -0
  16. data/3003_3_3AOB.mat +3 -0
  17. data/3004_1_3AOB.mat +3 -0
  18. data/3004_2_3AOB.mat +3 -0
  19. data/3004_3_3AOB.mat +3 -0
  20. data/3005_1_3AOB.mat +3 -0
  21. data/3005_2_3AOB.mat +3 -0
  22. data/3005_3_3AOB.mat +3 -0
  23. data/3006_1_3AOB.mat +3 -0
  24. data/3007_1_3AOB.mat +3 -0
  25. data/3007_2_3AOB.mat +3 -0
  26. data/3007_3_3AOB.mat +3 -0
  27. data/3008_1_3AOB.mat +3 -0
  28. data/3009_1_3AOB.mat +3 -0
  29. data/93385_1_QUINN_3AOB.mat +3 -0
  30. data/94391_1_QUINN_3AOB.mat +3 -0
  31. data/97750_1_QUINN_3AOB.mat +3 -0
  32. scripts/BV_Chanlocs_60.mat +0 -0
  33. scripts/BigAgg_Data.mat +0 -0
  34. scripts/Manuscript_3AOB.m +110 -0
  35. scripts/ORIGINAL_README.txt +10 -0
  36. scripts/QUALITY_CHECK.xlsx +0 -0
  37. scripts/QUINN_QUALITY_CHECK.xlsx +0 -0
  38. scripts/Run_Thresh_1D.m +61 -0
  39. scripts/STEP1_3AOB_JFC.m +192 -0
  40. scripts/STEP2_3AOB_Process.m +250 -0
  41. scripts/s1_Load_Data.m +121 -0
  42. scripts/s2_Kill_Data.m +54 -0
  43. scripts/s3_Demographics.m +88 -0
  44. scripts/s4_Example_ERPs.m +30 -0
  45. scripts/s5_ERPs_by_Group.m +137 -0
  46. scripts/s6_Correlations.m +36 -0
  47. scripts/s6_Correlations_S1EEG_With_FrSBediffs.m +111 -0
  48. scripts/s6_FOR_SPSS.m +111 -0
  49. scripts/s7_Mengs_z.m +51 -0
  50. scripts/sx_Predict_Attrition.m +85 -0
.gitattributes CHANGED
@@ -57,3 +57,31 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
+ data/93385_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
61
+ data/94391_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
62
+ data/97750_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
63
+ data/14000_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
64
+ data/18367_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
65
+ data/19759_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
66
+ data/20543_1_QUINN_3AOB.mat filter=lfs diff=lfs merge=lfs -text
67
+ data/3001_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
68
+ data/3001_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
69
+ data/3001_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
70
+ data/3002_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
71
+ data/3002_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
72
+ data/3002_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
73
+ data/3003_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
74
+ data/3003_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
75
+ data/3003_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
76
+ data/3004_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
77
+ data/3004_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
78
+ data/3004_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
79
+ data/3005_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
80
+ data/3005_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
81
+ data/3005_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
82
+ data/3006_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
83
+ data/3007_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
84
+ data/3007_2_3AOB.mat filter=lfs diff=lfs merge=lfs -text
85
+ data/3007_3_3AOB.mat filter=lfs diff=lfs merge=lfs -text
86
+ data/3008_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
87
+ data/3009_1_3AOB.mat filter=lfs diff=lfs merge=lfs -text
LICENSE.txt ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Public Domain Dedication and License (PDDL)
2
+
3
+
4
+ Preamble
5
+
6
+
7
+ The Open Data Commons – Public Domain Dedication and Licence is a document intended to allow you to freely share, modify, and use this work for any purpose and without any restrictions. This licence is intended for use on databases or their contents (“data”), either together or individually.
8
+
9
+
10
+ Many databases are covered by copyright. Some jurisdictions, mainly in Europe, have specific special rights that cover databases called the “sui generis” database right. Both of these sets of rights, as well as other legal rights used to protect databases and data, can create uncertainty or practical difficulty for those wishing to share databases and their underlying data but retain a limited amount of rights under a “some rights reserved” approach to licensing as outlined in the Science Commons Protocol for Implementing Open Access Data. As a result, this waiver and licence tries to the fullest extent possible to eliminate or fully license any rights that cover this database and data. Any Community Norms or similar statements of use of the database or data do not form a part of this document, and do not act as a contract for access or other terms of use for the database or data.
11
+
12
+
13
+ The position of the recipient of the work
14
+
15
+
16
+ Because this document places the database and its contents in or as close as possible within the public domain, there are no restrictions or requirements placed on the recipient by this document. Recipients may use this work commercially, use technical protection measures, combine this data or database with other databases or data, and share their changes and additions or keep them secret. It is not a requirement that recipients provide further users with a copy of this licence or attribute the original creator of the data or database as a source. The goal is to eliminate restrictions held by the original creator of the data and database on the use of it by others.
17
+
18
+
19
+ The position of the dedicator of the work
20
+
21
+
22
+ Copyright law, as with most other law under the banner of “intellectual property”, is inherently national law. This means that there exists several differences in how copyright and other IP rights can be relinquished, waived or licensed in the many legal jurisdictions of the world. This is despite much harmonisation of minimum levels of protection. The internet and other communication technologies span these many disparate legal jurisdictions and thus pose special difficulties for a document relinquishing and waiving intellectual property rights, including copyright and database rights, for use by the global community. Because of this feature of intellectual property law, this document first relinquishes the rights and waives the relevant rights and claims. It then goes on to license these same rights for jurisdictions or areas of law that may make it difficult to relinquish or waive rights or claims.
23
+
24
+
25
+ The purpose of this document is to enable rightsholders to place their work into the public domain. Unlike licences for free and open source software, free cultural works, or open content licences, rightsholders will not be able to “dual license” their work by releasing the same work under different licences. This is because they have allowed anyone to use the work in whatever way they choose. Rightsholders therefore can’t re-license it under copyright or database rights on different terms because they have nothing left to license. Doing so creates truly accessible data to build rich applications and advance the progress of science and the arts.
26
+
27
+
28
+ This document can cover either or both of the database and its contents (the data). Because databases can have a wide variety of content – not just factual data – rightsholders should use the Open Data Commons – Public Domain Dedication & Licence for an entire database and its contents only if everything can be placed under the terms of this document. Because even factual data can sometimes have intellectual property rights, rightsholders should use this licence to cover both the database and its factual data when making material available under this document; even if it is likely that the data would not be covered by copyright or database rights.
29
+
30
+
31
+ Rightsholders can also use this document to cover any copyright or database rights claims over only a database, and leave the contents to be covered by other licences or documents. They can do this because this document refers to the “Work”, which can be either – or both – the database and its contents. As a result, rightsholders need to clearly state what they are dedicating under this document when they dedicate it.
32
+
33
+
34
+ Just like any licence or other document dealing with intellectual property, rightsholders should be aware that one can only license what one owns. Please ensure that the rights have been cleared to make this material available under this document.
35
+
36
+
37
+ This document permanently and irrevocably makes the Work available to the public for any use of any kind, and it should not be used unless the rightsholder is prepared for this to happen.
38
+
39
+
40
+ Part I: Introduction
41
+
42
+
43
+ The Rightsholder (the Person holding rights or claims over the Work) agrees as follows:
44
+
45
+
46
+ 1.0 Definitions of Capitalised Words
47
+
48
+
49
+ “Copyright” – Includes rights under copyright and under neighbouring rights and similarly related sets of rights under the law of the relevant jurisdiction under Section 6.4.
50
+
51
+
52
+ “Data” – The contents of the Database, which includes the information, independent works, or other material collected into the Database offered under the terms of this Document.
53
+
54
+
55
+ “Database” – A collection of Data arranged in a systematic or methodical way and individually accessible by electronic or other means offered under the terms of this Document.
56
+
57
+
58
+ “Database Right” – Means rights over Data resulting from the Chapter III (“sui generis”) rights in the Database Directive (Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases) and any future updates as well as any similar rights available in the relevant jurisdiction under Section 6.4.
59
+
60
+
61
+ “Document” – means this relinquishment and waiver of rights and claims and back up licence agreement.
62
+
63
+
64
+ “Person” – Means a natural or legal person or a body of persons corporate or incorporate.
65
+
66
+
67
+ “Use” – As a verb, means doing any act that is restricted by Copyright or Database Rights whether in the original medium or any other; and includes modifying the Work as may be technically necessary to use it in a different mode or format. This includes the right to sublicense the Work.
68
+
69
+
70
+ “Work” – Means either or both of the Database and Data offered under the terms of this Document.
71
+
72
+
73
+ “You” – the Person acquiring rights under the licence elements of this Document.
74
+
75
+
76
+ Words in the singular include the plural and vice versa.
77
+
78
+
79
+ 2.0 What this document covers
80
+
81
+
82
+ 2.1. Legal effect of this Document. This Document is:
83
+
84
+
85
+ a. A dedication to the public domain and waiver of Copyright and Database Rights over the Work; and
86
+
87
+
88
+ b. A licence of Copyright and Database Rights over the Work in jurisdictions that do not allow for relinquishment or waiver.
89
+
90
+
91
+ 2.2. Legal rights covered.
92
+
93
+
94
+ a. Copyright. Any copyright or neighbouring rights in the Work. Copyright law varies between jurisdictions, but is likely to cover: the Database model or schema, which is the structure, arrangement, and organisation of the Database, and can also include the Database tables and table indexes; the data entry and output sheets; and the Field names of Data stored in the Database. Copyright may also cover the Data depending on the jurisdiction and type of Data; and
95
+
96
+
97
+ b. Database Rights. Database Rights only extend to the extraction and re-utilisation of the whole or a substantial part of the Data. Database Rights can apply even when there is no copyright over the Database. Database Rights can also apply when the Data is removed from the Database and is selected and arranged in a way that would not infringe any applicable copyright.
98
+
99
+
100
+ 2.2 Rights not covered.
101
+
102
+
103
+ a. This Document does not apply to computer programs used in the making or operation of the Database;
104
+
105
+
106
+ b. This Document does not cover any patents over the Data or the Database. Please see Section 4.2 later in this Document for further details; and
107
+
108
+
109
+ c. This Document does not cover any trade marks associated with the Database. Please see Section 4.3 later in this Document for further details.
110
+
111
+
112
+ Users of this Database are cautioned that they may have to clear other rights or consult other licences.
113
+
114
+
115
+ 2.3 Facts are free. The Rightsholder takes the position that factual information is not covered by Copyright. This Document however covers the Work in jurisdictions that may protect the factual information in the Work by Copyright, and to cover any information protected by Copyright that is contained in the Work.
116
+
117
+
118
+ Part II: Dedication to the public domain
119
+
120
+
121
+ 3.0 Dedication, waiver, and licence of Copyright and Database Rights
122
+
123
+
124
+ 3.1 Dedication of Copyright and Database Rights to the public domain. The Rightsholder by using this Document, dedicates the Work to the public domain for the benefit of the public and relinquishes all rights in Copyright and Database Rights over the Work.
125
+
126
+
127
+ a. The Rightsholder realises that once these rights are relinquished, that the Rightsholder has no further rights in Copyright and Database Rights over the Work, and that the Work is free and open for others to Use.
128
+
129
+
130
+ b. The Rightsholder intends for their relinquishment to cover all present and future rights in the Work under Copyright and Database Rights, whether they are vested or contingent rights, and that this relinquishment of rights covers all their heirs and successors.
131
+
132
+
133
+ The above relinquishment of rights applies worldwide and includes media and formats now known or created in the future.
134
+
135
+
136
+ 3.2 Waiver of rights and claims in Copyright and Database Rights when Section 3.1 dedication inapplicable. If the dedication in Section 3.1 does not apply in the relevant jurisdiction under Section 6.4, the Rightsholder waives any rights and claims that the Rightsholder may have or acquire in the future over the Work in:
137
+
138
+
139
+ a. Copyright; and
140
+
141
+
142
+ b. Database Rights.
143
+
144
+
145
+ To the extent possible in the relevant jurisdiction, the above waiver of rights and claims applies worldwide and includes media and formats now known or created in the future. The Rightsholder agrees not to assert the above rights and waives the right to enforce them over the Work.
146
+
147
+
148
+ 3.3 Licence of Copyright and Database Rights when Sections 3.1 and 3.2 inapplicable. If the dedication and waiver in Sections 3.1 and 3.2 does not apply in the relevant jurisdiction under Section 6.4, the Rightsholder and You agree as follows:
149
+
150
+
151
+ a. The Licensor grants to You a worldwide, royalty-free, non-exclusive, licence to Use the Work for the duration of any applicable Copyright and Database Rights. These rights explicitly include commercial use, and do not exclude any field of endeavour. To the extent possible in the relevant jurisdiction, these rights may be exercised in all media and formats whether now known or created in the future.
152
+
153
+
154
+ 3.4 Moral rights. This section covers moral rights, including the right to be identified as the author of the Work or to object to treatment that would otherwise prejudice the author’s honour and reputation, or any other derogatory treatment:
155
+
156
+
157
+ a. For jurisdictions allowing waiver of moral rights, Licensor waives all moral rights that Licensor may have in the Work to the fullest extent possible by the law of the relevant jurisdiction under Section 6.4;
158
+
159
+
160
+ b. If waiver of moral rights under Section 3.4 a in the relevant jurisdiction is not possible, Licensor agrees not to assert any moral rights over the Work and waives all claims in moral rights to the fullest extent possible by the law of the relevant jurisdiction under Section 6.4; and
161
+
162
+
163
+ c. For jurisdictions not allowing waiver or an agreement not to assert moral rights under Section 3.4 a and b, the author may retain their moral rights over the copyrighted aspects of the Work.
164
+
165
+
166
+ Please note that some jurisdictions do not allow for the waiver of moral rights, and so moral rights may still subsist over the work in some jurisdictions.
167
+
168
+
169
+ 4.0 Relationship to other rights
170
+
171
+
172
+ 4.1 No other contractual conditions. The Rightsholder makes this Work available to You without any other contractual obligations, either express or implied. Any Community Norms statement associated with the Work is not a contract and does not form part of this Document.
173
+
174
+
175
+ 4.2 Relationship to patents. This Document does not grant You a licence for any patents that the Rightsholder may own. Users of this Database are cautioned that they may have to clear other rights or consult other licences.
176
+
177
+
178
+ 4.3 Relationship to trade marks. This Document does not grant You a licence for any trade marks that the Rightsholder may own or that the Rightsholder may use to cover the Work. Users of this Database are cautioned that they may have to clear other rights or consult other licences.
179
+
180
+
181
+ Part III: General provisions
182
+
183
+
184
+ 5.0 Warranties, disclaimer, and limitation of liability
185
+
186
+
187
+ 5.1 The Work is provided by the Rightsholder “as is” and without any warranty of any kind, either express or implied, whether of title, of accuracy or completeness, of the presence or absence of errors, of fitness for purpose, or otherwise. Some jurisdictions do not allow the exclusion of implied warranties, so this exclusion may not apply to You.
188
+
189
+
190
+ 5.2 Subject to any liability that may not be excluded or limited by law, the Rightsholder is not liable for, and expressly excludes, all liability for loss or damage however and whenever caused to anyone by any use under this Document, whether by You or by anyone else, and whether caused by any fault on the part of the Rightsholder or not. This exclusion of liability includes, but is not limited to, any special, incidental, consequential, punitive, or exemplary damages. This exclusion applies even if the Rightsholder has been advised of the possibility of such damages.
191
+
192
+
193
+ 5.3 If liability may not be excluded by law, it is limited to actual and direct financial loss to the extent it is caused by proved negligence on the part of the Rightsholder.
194
+
195
+
196
+ 6.0 General
197
+
198
+
199
+ 6.1 If any provision of this Document is held to be invalid or unenforceable, that must not affect the validity or enforceability of the remainder of the terms of this Document.
200
+
201
+
202
+ 6.2 This Document is the entire agreement between the parties with respect to the Work covered here. It replaces any earlier understandings, agreements or representations with respect to the Work not specified here.
203
+
204
+
205
+ 6.3 This Document does not affect any rights that You or anyone else may independently have under any applicable law to make any use of this Work, including (for jurisdictions where this Document is a licence) fair dealing, fair use, database exceptions, or any other legally recognised limitation or exception to infringement of copyright or other applicable laws.
206
+
207
+
208
+ 6.4 This Document takes effect in the relevant jurisdiction in which the Document terms are sought to be enforced. If the rights waived or granted under applicable law in the relevant jurisdiction includes additional rights not waived or granted under this Document, these additional rights are included in this Document in order to meet the intent of this Document.
README.md ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: pddl
3
+ tags:
4
+ - eeg
5
+ - medical
6
+ - clinical
7
+ - classification
8
+ - mtbi
9
+ - tbi
10
+ - oddball
11
+ ---
12
+ # Cavanagh2019: EEG mTBI Classification Dataset with Auditory Oddball Task
13
+ The Cavanagh2019 dataset includes EEG recordings collected during a 3-stimulus auditory oddball paradigm in participants with mild traumatic brain injury (mTBI) and matched healthy controls. A total of 96 participants took part: 45 sub-acute mTBI patients (tested within 2 weeks post-injury), 26 healthy controls, and 25 chronic TBI patients (mild to moderate severity). Sub-acute mTBI and control participants completed two or three EEG sessions - at 3-14 days after the injury, and again after approximately 2 months - while chronic TBI participants completed a single session.
14
+
15
+ The task involved 260 trials: 70% standard tones (440 Hz), 15% target tones (660 Hz), and 15% novel naturalistic sounds. Stimuli were presented binaurally, and participants were instructed to count target tones while ignoring the others. EEG was recorded from 60 channels at a 500 Hz sampling rate.
16
+ ## Paper
17
+ Cavanagh, J. F., Wilson, J. K., Rieger, R. E., Gill, D., Broadway, J. M., Remer, J. H. S., Fratzke, V., Mayer, A. R., & Quinn, D. K. (2019). **ERPs predict symptomatic distress and recovery in sub-acute mild traumatic brain injury**. _Neuropsychologia_, 132, 107125.
18
+
19
+ DISCLAIMER: We (DISCO) are NOT the owners or creators of this dataset, but we merely uploaded it here, to support our ("EEG-Bench") and others' work on EEG benchmarking.
20
+ ## Dataset Structure
21
+ - `data/` contains the annotated experiment EEG data.
22
+ - `scripts/` contains MATLAB scripts that produced the paper's results.
23
+ - `scripts/BigAgg_Data.mat` contains information about the subjects.
24
+ - `scripts/QUALITY_CHECK.xlsx` and `scripts/QUINN_QUALITY_CHECK.xlsx` contain information about bad quality recordings.
25
+
26
+ A `.mat` file can be read in python as follows:
27
+ ```python
28
+ from scipy.io import loadmat
29
+ mat = loadmat(filepath, simplify_cells=True)
30
+ ```
31
+ (A field "fieldname" can be read from `mat` as `mat["fieldname"]`.)
32
+
33
+ Subject information can be read from `scripts/BigAgg_Data.mat` from the following fields (among others):
34
+ - `DEMO`: information about mTBI and control subjects
35
+ - `ID`: subject IDs, as included in the filename of the corresponding EEG recording under `data/`
36
+ - `Group_CTL1`: for each subject, whether it belongs to the control group (which is the case if and only if the corresponding `Group_CTL1`-entry is `1`) or not
37
+ - `Sex_F1`: gender of the subject (`1` means female, everything else means male)
38
+ - `Age`: age of the subject
39
+ - `Q_DEMO`: information about chronic TBI subjects
40
+ - `URSI`: subject IDs, as included in the filename of the corresponding EEG recording under `data/`
41
+ - `Sex_F1`: gender of the subject (`1` means female, everything else means male)
42
+ - `Age`: age of the subject
43
+ - `NP`: mTBI and control subjects' TOMM, TOPF, HVLT and other scores
44
+ - `Q_NP`: chronic TBI subjects' TOMM, TOPF, HVLT and other scores
45
+ - `QUEX`: mTBI and control BDI and other scores
46
+ - `Q_QUEX`: chronic TBI BDI and other scores
47
+ - `TBIfields`: information about mTBI subjects' injury
48
+ - `Q_TBIfields`: information about chronic TBI subjects' injury
49
+
50
+ ### Filename Format
51
+
52
+ ```
53
+ [PID]_[SESSION]_3AOB.mat (or [PID]_[SESSION]_QUINN_3AOB.mat for chronic TBI participants)
54
+ ```
55
+ PID is the patient ID (e.g. `3001`), while SESSION distinguishes different days of recording (can be `1`, `2` or `3` for mTBI patients and control participants, and is always `1` for patients with chronic TBI).
56
+
57
+ ### Fields in each File
58
+ Let `mat` be an EEG `.mat` file from the `data/` directory.
59
+ Then `mat` contains (among others) the following fields and subfields
60
+ - `EEG`
61
+ - `data`: EEG data of shape `(#channels, trial_len, #trials)`. E.g. a recording of 247 trials/epochs with 60 channels, each trial having a duration of 4 seconds and a sampling rate of 500 Hz will have shape `(60, 2000, 247)`.
62
+ - `event`: Contains a list of dictionaries, each entry (each event) having the following description:
63
+ - `latency`: The onset of the event, measured as the index in the merged time-dimension `#trials x trial_len` (note `#trials` being the _outer_ and `trial_len` being the _inner_ array when merging). The duration of each event is 200ms. Hence, with a 500 Hz sampling rate, the EEG data `event_data` corresponding to the `i`-th event is
64
+ ```python
65
+ start_index = mat["EEG"]["event"][i]["latency"]
66
+ event_data = numpy.transpose(mat["EEG"]["data"], [0, 2, 1]).reshape([num_channels, num_trials * trial_len])[:, start_index:start_index+100] # shape (#channels, 100)
67
+ ```
68
+ - `type`: The type of event. Can be `"S200"` (660 Hz tone), `"S201"` (440 Hz tone) or `"S202"` (naturalistic).
69
+ - `chanlocs`: A list of channel descriptors
70
+ - `nbchan`: Number of channels
71
+ - `trials`: Number of trials/epochs in this recording
72
+ - `srate`: Sampling Rate (Hz)
73
+
74
+ ## License
75
+ By the original authors of this work, this work has been licensed under the PDDL v1.0 license (see LICENSE.txt).
data/14000_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:237765c1f2338d31a2542c2a10e558d1a1238fd70804d2ed94be218c1343c0f1
3
+ size 477373827
data/18367_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce3926646d3708e761de99c6d8a8e4781fa2ae7f260dc75ea507f558f98c6862
3
+ size 467408442
data/19759_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:96741bacc20feff34dc79a6e0d6273aabf200d5e3cc89af9aa1f5730d695a41c
3
+ size 471988655
data/20543_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce25742f58da4d907478876304401cc13a0a41be0f689b49efbb86d1ea0f7175
3
+ size 487027572
data/3001_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d83b16fd4173acc142429f2848c5e27ecbd69b75b134dfaa8904d92f1fc24e8f
3
+ size 207860776
data/3001_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:864ac7503f94f3936f75aa2249dee8f0ef0038b363a40215e51e2a329c131111
3
+ size 207441186
data/3001_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6d7ce2ceb6f08dff284037d15c27f67cfbdeced3a9f7a24d6c3d52cabb9f9612
3
+ size 211375742
data/3002_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a43aa053b4b9a182f082caa642cb07e4bd8989fd362cc131c35fa5e981205d47
3
+ size 205726401
data/3002_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:84d6d458769d932a10bb7b56f39150cd579262afb87a28aafef1f4fc56366b21
3
+ size 208852720
data/3002_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:efb2d3ec3011249d25c320e0b7ba413a4d58a3fcef31143994f0a10a61808ebc
3
+ size 207989868
data/3003_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76b4fd1c30858080300e9d8bfe0890f400f83d729c87ce5ae0dd3611dde2a5e0
3
+ size 208254692
data/3003_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:03476a9b854410ef3629eaa6a078d395999908297919179535eae5e04cc4ed2b
3
+ size 209728409
data/3003_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:61d5575270a301880ec57c348db6eab9a5362d10894469808637014a7bac9f3f
3
+ size 206735253
data/3004_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c71f52c98b013d605b6c7330bf66dd7b3bb247dc181e1c378819f90892d45264
3
+ size 214132459
data/3004_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf7eadb1d134ff4caf497908e4c6666043895bed5cdaba825f8f4446456f8d42
3
+ size 215338995
data/3004_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2bd90b4f00adb16f0cb46ba5e35e50ac5fe86242ca4017a437f864ab61db36f6
3
+ size 212704950
data/3005_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e01e0b6dd770f40833cbaecfb07b6d83396127238f1a26551342d4c9a7bf19a
3
+ size 213404324
data/3005_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3d6b2e859a03ed4b2b2dfc8a8686edb3f0030fe8b505cfe30bfbcdadbdb20aad
3
+ size 211399673
data/3005_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f609344a8517dd49d89a13caf2b6f1f123c1ce9d9050fc39b4ce0e5af906cce
3
+ size 209733608
data/3006_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44d9151a92ca04ca8f278edb4417574062ce18820a339efc26c221602b1401c9
3
+ size 207559817
data/3007_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:49277baee0415db1ec03067051ce9606fd21d85d564646fe887df8f956b809e1
3
+ size 214527656
data/3007_2_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b6e92caa9aab318f0aac8422ca46675697c4039ce4e907893f480282c00d863
3
+ size 210261649
data/3007_3_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48e16387bb9591d6ae6480f355f37c300f97496965862d24003967f1e5d3a04d
3
+ size 200034154
data/3008_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb85217860131012c298b886c56e75853fb1e1aca705ec40d1001e2af9574a3c
3
+ size 209273569
data/3009_1_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0c2fbf9e92a7fb8e082886de823ed73a7a5b482cc920612fa7b5e5239d135ba4
3
+ size 207644251
data/93385_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:93c270eb4c33b18ebf92cd65ec6b4029b9eb66fea64bb0377096712feeedcef6
3
+ size 482612470
data/94391_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bec68e4c6208acba23004f35b3ca28c1c2aae76948452b61fce0106aabe7ef5e
3
+ size 467689543
data/97750_1_QUINN_3AOB.mat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a4c13374fcbd0f629958a8b22e6f3dc9c966f4751582bd6e23da19079e58a561
3
+ size 474848275
scripts/BV_Chanlocs_60.mat ADDED
Binary file (4.82 kB). View file
 
scripts/BigAgg_Data.mat ADDED
Binary file (26.1 kB). View file
 
scripts/Manuscript_3AOB.m ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %% Step 3 3AOB
2
+ clear all; clc
3
+ addpath('Z:\EXPERIMENTS\mTBICoBRE\EEG\');
4
+ datadir='Z:\EXPERIMENTS\mTBICoBRE\EEG\3AOB Processed\';
5
+ homedir='Z:\EXPERIMENTS\mTBICoBRE\MANUSCRIPT 3AOB\';
6
+ cd(homedir);
7
+
8
+ load('Z:\EXPERIMENTS\mTBICoBRE\EEG\BV_Chanlocs_60.mat');
9
+ tx2disp=-500:2:1000;
10
+
11
+ % Load Data
12
+ s1_Load_Data;
13
+
14
+ % Kill Data
15
+ s2_Kill_Data;
16
+
17
+ %% Demographics
18
+
19
+ s3_Demographics;
20
+
21
+ sx_Predict_Attrition;
22
+
23
+
24
+ %% Example ERPs
25
+
26
+ StdSite=find(strcmpi('FCz',{BV_Chanlocs_60.labels}));
27
+ StdT1=300; StdT2=450;
28
+
29
+ TargSite=find(strcmpi('Pz',{BV_Chanlocs_60.labels}));
30
+ TargT1=400; TargT2=600;
31
+
32
+ NovSite=find(strcmpi('FCz',{BV_Chanlocs_60.labels}));
33
+ NovT1=300; NovT2=450;
34
+
35
+ ERPSITE=[StdSite,TargSite,NovSite];
36
+ ERPWINS=[StdT1,StdT2;TargT1,TargT2;NovT1,NovT2];
37
+ ERPWINS_tx2disp=[[find(tx2disp==StdT1),find(tx2disp==StdT2)];...
38
+ [find(tx2disp==TargT1),find(tx2disp==TargT2)];...
39
+ [find(tx2disp==NovT1),find(tx2disp==NovT2)] ];
40
+
41
+
42
+ s4_Example_ERPs
43
+
44
+ %% ERPs by Group
45
+
46
+ time=1;
47
+
48
+ s5_ERPs_by_Group
49
+
50
+
51
+ %% For SPSS
52
+
53
+ s6_FOR_SPSS
54
+
55
+ figure; boxplot(FORSPSS(:,[10,11,16,17,22,23])); % Raw, Scaled
56
+ skewness(FORSPSS(:,[10,11,16,17,22,23])) % not skewed
57
+
58
+ % Calculate reliability for controls
59
+ CTL_REL=FORSPSS(FORSPSS(:,3)==1,:);
60
+
61
+ [REL.rho.F12,REL.p.F12]=corr(CTL_REL(:,11),CTL_REL(:,17),'type','Spearman','rows','pairwise'); % F_Tot 1 & 2
62
+ [REL.rho.F13,REL.p.F13]=corr(CTL_REL(:,11),CTL_REL(:,23),'type','Spearman','rows','pairwise'); % F_Tot 1 & 3
63
+ [REL.rho.F23,REL.p.F23]=corr(CTL_REL(:,17),CTL_REL(:,23),'type','Spearman','rows','pairwise'); % F_Tot 2 & 3
64
+
65
+ [REL.rho.P3b12,REL.p.P3b12]=corr(CTL_REL(:,11+1),CTL_REL(:,17+1),'type','Spearman','rows','pairwise'); % P3b 1 & 2
66
+ [REL.rho.P3b13,REL.p.P3b13]=corr(CTL_REL(:,11+1),CTL_REL(:,23+1),'type','Spearman','rows','pairwise'); % P3b 1 & 3
67
+ [REL.rho.P3b23,REL.p.P3b23]=corr(CTL_REL(:,17+1),CTL_REL(:,23+1),'type','Spearman','rows','pairwise'); % P3b 2 & 3
68
+
69
+ [REL.rho.P3a12,REL.p.P3a12]=corr(CTL_REL(:,11+2),CTL_REL(:,17+2),'type','Spearman','rows','pairwise'); % P3a 1 & 2
70
+ [REL.rho.P3a13,REL.p.P3a13]=corr(CTL_REL(:,11+2),CTL_REL(:,23+2),'type','Spearman','rows','pairwise'); % P3a 1 & 3
71
+ [REL.rho.P3a23,REL.p.P3a23]=corr(CTL_REL(:,17+2),CTL_REL(:,23+2),'type','Spearman','rows','pairwise'); % P3a 2 & 3
72
+
73
+ %% Correlations
74
+
75
+ DV=IDENTITY.QUEX(:,find(strcmp('F_Tot',IDENTITY_QUEX_HDR)));
76
+
77
+ time=1;
78
+ CONDI4Corr=3; % Std, Targ, Nov
79
+
80
+ s6_Correlations
81
+
82
+ %% Predictions
83
+
84
+ quexidx=find(strcmp('F_Tot',IDENTITY_QUEX_HDR));
85
+ CONDI4Corr=2; % Std, Targ, Nov
86
+
87
+ s6_Correlations_S1EEG_With_FrSBediffs
88
+
89
+ %% -------------- Between-Group rho-to-z
90
+
91
+ % Just type 'em in here from the plots (remember number on plots is df, not N):
92
+
93
+ r1=-.11
94
+ n1=38
95
+ r2=-.46
96
+ n2=23
97
+
98
+ clc;
99
+
100
+ t_r1 = 0.5*log((1+r1)/(1-r1));
101
+ t_r2 = 0.5*log((1+r2)/(1-r2));
102
+ z = (t_r1-t_r2)/sqrt(1/(n1-3)+1/(n2-3))
103
+ p = (1-normcdf(abs(z),0,1))*2
104
+
105
+
106
+ %% -------------- Within-Group rho-to-z
107
+
108
+ s7_Mengs_z
109
+
110
+
scripts/ORIGINAL_README.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ All data are the output of STEP1_3AOB_JFC.m. I didn't include raw data here since that contains things like date and time that need to be stripped out.
2
+
3
+ Files are labeled: ID _ session _ task.mat (e.g. 3001_1_3AOB.mat)
4
+
5
+ These have already been cleaned using APPLE; bad ICs are identified in the QUALITY CHECK.xls sheets.
6
+
7
+ I could be convinced to convert the raw data to .mat data so it could be run through STEP1 and people could clean it as they see fit - but otherwise having the data already pre-processed may help people get to some results quicker.
8
+
9
+ You can run STEP2_3AOB_Process.m to get the processed data, then run Manuscript_3AOB.m to call all the sub-routines that output the data in the paper.
10
+
scripts/QUALITY_CHECK.xlsx ADDED
Binary file (12 kB). View file
 
scripts/QUINN_QUALITY_CHECK.xlsx ADDED
Binary file (8.65 kB). View file
 
scripts/Run_Thresh_1D.m ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ function [Corrected_P] = Run_Thresh_1D(TEMP1,TEMP2,site,ttesttype)
2
+
3
+ SHUFFLES=5000;
4
+ for shuffi=1:SHUFFLES
5
+
6
+ tempA=squeeze(mean(TEMP1(:,site,251:1001),2));
7
+ tempB=squeeze(mean(TEMP2(:,site,251:1001),2));
8
+ tempAB=[tempA;tempB];
9
+ idx=shuffle([ones(1,25),zeros(1,25)]);
10
+ A=tempAB(idx==1,:);
11
+ B=tempAB(idx==0,:);
12
+
13
+ if strmatch(ttesttype,'between')
14
+ [H,P,CI,STATS]=ttest2(A,B);
15
+ elseif strmatch(ttesttype,'within')
16
+ [H,P,CI,STATS]=ttest(A,B);
17
+ end
18
+ P(P<=.05)=NaN; P(P>.05)=0; P(isnan(P))=1;
19
+
20
+ P=squeeze(P);
21
+ l=bwlabel(P);
22
+ if max(l)>0
23
+ for ei=1:max(l)
24
+ idxs = find(l == ei);
25
+ tempthresh(ei) = sum(abs(STATS.tstat(idxs)));
26
+ end
27
+ THRESH(shuffi) = max(tempthresh);
28
+ clear idxs tempthresh ;
29
+ else
30
+ THRESH(shuffi) = 0;
31
+ end
32
+ clear H CI P STATS temp* A B idx l dims lmax;
33
+
34
+ end
35
+ THRESH=sort(THRESH);
36
+ ThisThreshold=THRESH(end-SHUFFLES*.05);
37
+
38
+
39
+ % NOW Run 1D size of effects
40
+ if strmatch(ttesttype,'between')
41
+ [H,P,CI,STATS]=ttest2(squeeze(mean(TEMP1(:,site,251:1001),2)),squeeze(mean(TEMP2(:,site,251:1001),2)));
42
+ elseif strmatch(ttesttype,'within')
43
+ [H,P,CI,STATS]=ttest(squeeze(mean(TEMP1(:,site,251:1001),2)),squeeze(mean(TEMP2(:,site,251:1001),2)));
44
+ end
45
+ P(P<=.05)=NaN; P(P>.05)=0; P(isnan(P))=1;
46
+
47
+ P=squeeze(P);
48
+ l=bwlabel(P);
49
+ Corrected_P=NaN*ones(1,751);
50
+ if max(l)>0
51
+ for ei=1:max(l)
52
+ idxs = find(l == ei);
53
+ if sum(abs(STATS.tstat(idxs))) > ThisThreshold
54
+ Corrected_P(idxs) = 1;
55
+ end
56
+ end
57
+ end
58
+ clear H CI P STATS temp* A B idx l dims lmax idxs;
59
+
60
+ clear THRESH ThisThreshold
61
+
scripts/STEP1_3AOB_JFC.m ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %% 3AOB JFC
2
+ clear all; clc
3
+ addpath('Z:\EXPERIMENTS\mTBICoBRE\EEG\');
4
+ addpath(genpath('Y:\Programs\eeglab12_0_2_1b'));
5
+ rmpath('Y:\Programs\eeglab12_0_2_1b\functions\octavefunc');
6
+ rmpath('Y:\Programs\eeglab14_0_0b\functions\octavefunc');
7
+ datadir='Y:\EEG_Data\mTBICoBRE\'; % Data are here
8
+ saveloc='Z:\EXPERIMENTS\mTBICoBRE\EEG\3AOB Preproc\';
9
+ load('Z:\EXPERIMENTS\mTBICoBRE\EEG\BV_Chanlocs_60.mat');
10
+ cd(saveloc);
11
+
12
+ sx_dirs=dir([datadir,'M*']);
13
+ for sxi=1:length(sx_dirs)
14
+ for ses=1:3
15
+ sessdir=[datadir,sx_dirs(sxi).name,'\eeg\RawEEG\'];
16
+ sx_sess{sxi}{1,ses}=dir([sessdir,'*_',num2str(ses),'_ODDBALL.vhdr']);
17
+ sx_sess{sxi}{2,ses}=sessdir;
18
+ sx_sess{sxi}{3,ses}=sx_dirs(sxi).name;
19
+ end
20
+ end
21
+ LOG=[];
22
+ for sxi=1:size(sx_sess,2)
23
+ for sess=1:3
24
+ if ~isempty( sx_sess{sxi}{1,sess} )
25
+ subno=str2num(sx_sess{sxi}{1,sess}.name(1:4));
26
+ URSI=sx_sess{sxi}{3,sess};
27
+ LOG(subno-3000,sess+1)=subno;
28
+ LOG(subno-3000,1)=str2num(URSI(end-4:end));
29
+ end
30
+ end
31
+ end
32
+
33
+ for sxi=1:size(sx_sess,2)
34
+ for sess=1:3
35
+ if ~isempty( sx_sess{sxi}{1,sess} )
36
+
37
+ subno=str2num(sx_sess{sxi}{1,sess}.name(1:4));
38
+ thisdir=sx_sess{sxi}{2,sess};
39
+ URSI=sx_sess{sxi}{3,sess};
40
+ LOG2(sxi,sess)=subno;
41
+
42
+ if ~exist([saveloc,num2str(subno),'_',num2str(sess),'_3AOB.mat']);
43
+
44
+ % Data are 65 chans: 1=63 is EEG, 64 is VEOG, 65 is EKG Ref'd to CPz - - will want to retrieve that during re-referencing
45
+ EEG = pop_loadbv(thisdir,[num2str(subno),'_',num2str(sess),'_ODDBALL.vhdr']); clc; disp(['Loading ',num2str(subno),' s',num2str(sess)]);
46
+ % Run PATCH for sx<3003 s<2 AND for bad templates
47
+ PATCH
48
+ % Get Locs
49
+ locpath=('Y:\Programs\eeglab12_0_2_1b\plugins\dipfit2.2\standard_BESA\standard-10-5-cap385.elp');
50
+ EEG = pop_chanedit(EEG, 'lookup', locpath);
51
+ EEG = eeg_checkset( EEG );
52
+ % Get event types
53
+ for ai=2:length(EEG.event); clear temp; temp=EEG.event(ai).type;
54
+ if isempty(strmatch('boundary',temp)); TYPES(ai)=str2num(temp(2:end)) ; clear temp; end
55
+ end
56
+ UNIQUE_TYPES=unique(TYPES);
57
+ for ai=1:length(UNIQUE_TYPES); UNIQUE_TYPES_COUNT(ai)=sum(TYPES==UNIQUE_TYPES(ai)); end
58
+ clc; TRIGGERS=[UNIQUE_TYPES;UNIQUE_TYPES_COUNT] % Trigger type, Frequency
59
+
60
+ % Epoch
61
+ All_STIM={'S201','S200','S202'}; % Std, Target, Novel
62
+ EEG = pop_epoch( EEG, All_STIM, [-2 2], 'newname', 'Epochs', 'epochinfo', 'yes');
63
+ EEG = eeg_checkset( EEG );
64
+ % Remove VEOG and EKG
65
+ EEG.EKG=squeeze(EEG.data(65,:,:));
66
+ EEG.VEOG=squeeze(EEG.data(64,:,:));
67
+ EEG.data=EEG.data(1:63,:,:);
68
+ EEG.nbchan=63;
69
+ EEG.chanlocs(65)=[]; EEG.chanlocs(64)=[];
70
+ % Fix BV-specific issue - - - only needed for APPLE
71
+ for ai=1:size(EEG.urevent,2), EEG.urevent(ai).bvtime=EEG.urevent(ai).bvmknum; end
72
+ for ai=1:size(EEG.event,2), EEG.event(ai).bvtime=EEG.event(ai).bvmknum; end
73
+ for ai=1:size(EEG.epoch,2), EEG.epoch(ai).eventbvtime=EEG.epoch(ai).eventbvmknum; end
74
+ % Add CPz
75
+ EEG = pop_chanedit(EEG,'append',63,'changefield',{64 'labels' 'CPz'});
76
+ EEG = pop_chanedit(EEG,'lookup', locpath);
77
+ % Re-Ref to Average Ref and recover CPz
78
+ EEG = pop_reref(EEG,[],'refloc',struct('labels',{'CPz'},'type',{''},'theta',{180},'radius',{0.12662},'X',{-32.9279},'Y',{-4.0325e-15},'Z',{78.363},...
79
+ 'sph_theta',{-180},'sph_phi',{67.208},'sph_radius',{85},'urchan',{64},'ref',{''}),'keepref','on');
80
+ % Remove everything else NOW that CPz has been reconstructed from the total
81
+ EEG.MASTOIDS = squeeze(mean(EEG.data([10,21],:,:),1));
82
+ EEG.data = EEG.data([1:4,6:9,11:20,22:26,28:64],:,:);
83
+ EEG.nbchan=60;
84
+ EEG.chanlocs(27)=[]; EEG.chanlocs(21)=[]; EEG.chanlocs(10)=[]; EEG.chanlocs(5)=[]; % Have to be in this order!
85
+ % Should probably re-ref to average again now that the contaminated channels are gone
86
+ EEG = pop_reref(EEG,[]);
87
+ % Remove mean
88
+ EEG = pop_rmbase(EEG,[],[]);
89
+
90
+
91
+ % ----------------------
92
+ % Setup APPLE to interp chans, reject epochs, & ID bad ICs. Output will be Avg ref'd and ICA'd.
93
+ eeg_chans=1:60;
94
+ Do_ICA=1;
95
+ ref_chan=36; % Re-Ref to FCz [WEIRD STEP, BUT THIS IS FOR FASTER, which is a part of APPLE]
96
+ EEG = pop_reref(EEG,ref_chan,'keepref','on');
97
+
98
+ % Run APPLE (will re-ref data to avg ref)
99
+ [EEG,EEG.bad_chans,EEG.bad_epochs,EEG.bad_ICAs]=APPLE_3AOB(EEG,eeg_chans,ref_chan,Do_ICA,subno,EEG.VEOG,sess,BV_Chanlocs_60);
100
+
101
+ % Save
102
+ save([num2str(subno),'_',num2str(sess),'_3AOB.mat'],'EEG');
103
+ % ----------------------
104
+
105
+ %% Remove the (presumptive) bad ICAs:
106
+ bad_ICAs_To_Remove=EEG.bad_ICAs{2};
107
+ if bad_ICAs_To_Remove==0, bad_ICAs_To_Remove=1; end
108
+ EEG = pop_subcomp( EEG, bad_ICAs_To_Remove, 0);
109
+
110
+
111
+ % Get the good info out of the epochs
112
+ for ai=1:size(EEG.epoch,2)
113
+ % Initialize
114
+ EEG.epoch(ai).CUE=NaN;
115
+ for bi=1:size(EEG.epoch(ai).eventlatency,2)
116
+ % Get STIMTYPE
117
+ if EEG.epoch(ai).eventlatency{bi}==0 && isempty(strmatch(EEG.epoch(ai).eventtype{bi},'N999')); % If this bi is the event
118
+ % Get StimType
119
+ FullName=EEG.epoch(ai).eventtype{bi};
120
+ EEG.epoch(ai).CUE=str2num(FullName(2:end)) ;
121
+ clear FullName
122
+ VECTOR(ai,1)=EEG.epoch(ai).CUE;
123
+ end
124
+ end
125
+ end
126
+
127
+ % Let's just do this for display
128
+ dims=size(EEG.data);
129
+ EEG.data=eegfilt(EEG.data,500,[],20);
130
+ EEG.data=reshape(EEG.data,dims(1),dims(2),dims(3));
131
+
132
+ % Set Params
133
+ tx=-2000:2:1998;
134
+ b1=find(tx==-200); b2=find(tx==0);
135
+ t1=find(tx==-500); t2=find(tx==1000);
136
+ toporange1=find(tx==250); toporange2=find(tx==600); toporangetot=250:2:600;
137
+ tx2disp=-500:2:1000;
138
+ MAPLIMS=[-8 8];
139
+
140
+ % Basecor your ERPs here so they are pretty.
141
+ BASE=squeeze( mean(EEG.data(:,b1:b2,:),2) );
142
+ for ai=1:dims(1)
143
+ EEG.data(ai,:,:)=squeeze(EEG.data(ai,:,:))-repmat( BASE(ai,:),dims(2),1 );
144
+ end
145
+
146
+
147
+ % Get max of P2 across all condis
148
+ site=11; % Pz
149
+ ERP4topo=mean(EEG.data(site,toporange1:toporange2,VECTOR(:,1)==200),3);
150
+ topomax_P3b=toporangetot(find(ERP4topo==max(ERP4topo)));
151
+ topotoplot_P3b=find(tx==topomax_P3b);
152
+ site=36; % FCz
153
+ ERP4topo=mean(EEG.data(site,toporange1:toporange2,VECTOR(:,1)==202),3);
154
+ topomax_P3a=toporangetot(find(ERP4topo==max(ERP4topo)));
155
+ topotoplot_P3a=find(tx==topomax_P3a);
156
+ % --------------
157
+ figure;
158
+ site=11; % Pz
159
+ subplot(3,4,1:4); hold on
160
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==201),3),'k');
161
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==200),3),'r');
162
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==202),3),'b');
163
+ plot([topomax_P3b topomax_P3b],[-2 2],'m','linewidth',2); % indicate the max with a magenta line
164
+ title(['Pz Subno: ',num2str(subno),' Sess:',num2str(sess)]);
165
+ legend({'Std','Target','Novel'},'Location','NorthWest');
166
+ % --------------
167
+ site=36; % FCz
168
+ subplot(3,4,5:8); hold on
169
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==201),3),'k');
170
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==200),3),'r');
171
+ plot(tx2disp,mean(EEG.data(site,t1:t2,VECTOR(:,1)==202),3),'b');
172
+ plot([topomax_P3a topomax_P3a],[-2 2],'m','linewidth',2); % indicate the max with a magenta line
173
+ title(['FCz Subno: ',num2str(subno),' Sess:',num2str(sess)]);
174
+ % --------------
175
+ subplot(3,4,9); hold on
176
+ topoplot( mean(EEG.data(:,topotoplot_P3b,VECTOR(:,1)==201),3) , BV_Chanlocs_60,'maplimits',MAPLIMS); title('Std @ P3b')
177
+ subplot(3,4,10); hold on
178
+ topoplot( mean(EEG.data(:,topotoplot_P3b,VECTOR(:,1)==200),3) , BV_Chanlocs_60,'maplimits',MAPLIMS); title('Targ')
179
+ subplot(3,4,11); hold on
180
+ topoplot( mean(EEG.data(:,topotoplot_P3a,VECTOR(:,1)==202),3) , BV_Chanlocs_60,'maplimits',MAPLIMS); title('Novel')
181
+
182
+ saveas(gcf, [num2str(subno),'_',num2str(sess),'_3AOB_ERPs.png'],'png');
183
+ close all;
184
+
185
+ clear EEG VECTOR BASE PROBE TRIGGERS TYPES UNIQUE* did* topo* ERP* URSI dims eeg_chans;
186
+ end
187
+ end
188
+ end
189
+ end
190
+
191
+ %%
192
+
scripts/STEP2_3AOB_Process.m ADDED
@@ -0,0 +1,250 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %% Step 2 Oddball
2
+
3
+ clear all; clc
4
+ addpath('Z:\EXPERIMENTS\mTBICoBRE\EEG\');
5
+ savedir='Z:\EXPERIMENTS\mTBICoBRE\EEG\3AOB Processed\';
6
+
7
+ load('Z:\EXPERIMENTS\mTBICoBRE\EEG\BV_Chanlocs_60.mat');
8
+
9
+ % ########## For Cavanagh data
10
+ datadir='Z:\EXPERIMENTS\mTBICoBRE\EEG\3AOB Preproc\';
11
+ [D_DAT,D_HDR,D_ALL]=xlsread('Z:\EXPERIMENTS\mTBICoBRE\ANALYSIS\QUALITY_CHECK.xlsx','ODDBALL_ICAs');
12
+ FILEENDER='_3AOB.mat';
13
+
14
+ % % % ########## For Quinn data
15
+ % % datadir='Z:\EXPERIMENTS\mTBICoBRE\EEG\QUINN 3AOB Preproc\';
16
+ % % [D_DAT,D_HDR,D_ALL]=xlsread('Z:\EXPERIMENTS\mTBICoBRE\ANALYSIS\QUINN_QUALITY_CHECK.xlsx','ODDBALL_ICAs');
17
+ % % FILEENDER='_QUINN_3AOB.mat';
18
+
19
+ cd(datadir);
20
+
21
+ % ############# Set Params
22
+ srate=500;
23
+ tx=-2000:1000/srate:1998;
24
+ B1=find(tx==-300); B2=find(tx==-200);
25
+ T1=find(tx==-500); T2=find(tx==1000);
26
+ tx2disp=-500:2:1000;
27
+ % #############
28
+
29
+
30
+ for si=1:length(D_DAT)
31
+ for sess=1:size(D_DAT,2)-1 % should be '2' for Quinn data, '3' for Cavanagh data
32
+
33
+ subno=D_DAT(si,1);
34
+ skip=0;
35
+
36
+ INFO=D_ALL{si+1,sess+1}; % +1's b/c of subno column and header row
37
+ disp(['TRYOUT ',num2str(subno),' S',num2str(sess)]);
38
+
39
+ if isnumeric(INFO), bad_ICAs_To_Remove=INFO; end
40
+ if isnan(INFO), skip=1; end % not done yet
41
+ if strmatch('BAD',INFO), skip=1; end % Bad data
42
+ if ~isnumeric(INFO), bad_ICAs_To_Remove=str2num(INFO); end
43
+
44
+ % Don't repeat if already done
45
+ if exist([savedir,num2str(subno),'_',num2str(sess),'_3AOB_TFandERPs_L.mat'])==2, skip=1; end
46
+
47
+ if skip==0
48
+
49
+ load([num2str(subno),'_',num2str(sess),FILEENDER]); disp(['DOING: ',num2str(subno),'_',num2str(sess),'_3AOB.mat']);
50
+
51
+ % Remove the bad ICAs:
52
+ disp(['BAD ICAS: ', num2str(bad_ICAs_To_Remove)]);
53
+ EEG = pop_subcomp( EEG, bad_ICAs_To_Remove, 0);
54
+
55
+ % Get the good info out of the epochs
56
+ for ai=1:size(EEG.epoch,2)
57
+ % Initialize
58
+ EEG.epoch(ai).EEG=NaN;
59
+ for bi=1:size(EEG.epoch(ai).eventlatency,2)
60
+ % Get STIMTYPE
61
+ if EEG.epoch(ai).eventlatency{bi}==0 && isempty(strmatch(EEG.epoch(ai).eventtype{bi},'N999')); % If this bi is the event
62
+ % Get StimType
63
+ FullName=EEG.epoch(ai).eventtype{bi};
64
+ EEG.epoch(ai).EEG=str2num(FullName(2:end)) ;
65
+
66
+ clear FullName
67
+ VECTOR(ai,1)=EEG.epoch(ai).EEG; All_STIM={'S201','S200','S202'}; % Std, Target, Novel
68
+ end
69
+ end
70
+ end
71
+
72
+ % Only as many STD as NOV
73
+ N_n=sum(VECTOR(:,1)==202);
74
+ temp_idxs=find(VECTOR(:,1)==201);
75
+ temp_idxs=shuffle(temp_idxs);
76
+ VECTOR(temp_idxs(N_n+1:end),1)=999; clear temp_idxs;
77
+ % Save trial counts
78
+ TRL_ct(1)=sum(VECTOR(:,1)==201);
79
+ TRL_ct(2)=sum(VECTOR(:,1)==200);
80
+ TRL_ct(3)=sum(VECTOR(:,1)==202);
81
+
82
+
83
+ %%
84
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
85
+ % $$$$$$$$$$$$$$$$$$$$$$$ Time-Freq
86
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
87
+
88
+ % Setup Wavelet Params
89
+ num_freqs=50;
90
+ frex=logspace(.01,1.7,num_freqs);
91
+ s=logspace(log10(3),log10(10),num_freqs)./(2*pi*frex);
92
+ t=-2:1/EEG.srate:2;
93
+
94
+ % Definte Convolution Parameters
95
+ dims = size(EEG.data);
96
+ n_wavelet = length(t);
97
+ n_data = dims(2)*dims(3);
98
+ n_convolution = n_wavelet+n_data-1;
99
+ n_conv_pow2 = pow2(nextpow2(n_convolution));
100
+ half_of_wavelet_size = (n_wavelet-1)/2;
101
+
102
+ % For Laplacian
103
+ X = [BV_Chanlocs_60.X]; Y = [BV_Chanlocs_60.Y]; Z = [BV_Chanlocs_60.Z];
104
+
105
+ % Pick channel
106
+ chans=[36,33,56]; % FCz, F5, F6
107
+
108
+ for REFi=1:2
109
+ if REFi==1, TAG='V';
110
+ elseif REFi==2, TAG='L';
111
+ [EEG.data,~,~] = laplacian_perrinX(EEG.data,X,Y,Z,[],1e-6);
112
+ end
113
+
114
+ % Get FFT of data
115
+ for chani=1:3
116
+ EEG_fft(chani,:) = fft(reshape(EEG.data(chans(chani),:,:),1,n_data),n_conv_pow2);
117
+ end
118
+
119
+ for fi=1:num_freqs
120
+
121
+ wavelet = fft( exp(2*1i*pi*frex(fi).*t) .* exp(-t.^2./(2*(s(fi)^2))) , n_conv_pow2 ); % sqrt(1/(s(fi)*sqrt(pi))) *
122
+
123
+ % convolution
124
+ for chani=1:3
125
+ temp_conv = ifft(wavelet.*EEG_fft(chani,:));
126
+ temp_conv = temp_conv(1:n_convolution);
127
+ temp_conv = temp_conv(half_of_wavelet_size+1:end-half_of_wavelet_size);
128
+ EEG_conv(chani,:,:) = reshape(temp_conv,dims(2),dims(3));
129
+ clear temp_conv;
130
+
131
+ % Common pre-EEG baseline
132
+ temp_BASE(chani,:) = mean(mean(abs(EEG_conv(chani,B1:B2,:)).^2,2),3);
133
+ end
134
+
135
+ for idx=1:3
136
+
137
+ if idx==1, idx_V=VECTOR(:,1)==201; % STD
138
+ elseif idx==2, idx_V=VECTOR(:,1)==200; % TARG
139
+ elseif idx==3, idx_V=VECTOR(:,1)==202; % NOV
140
+ end
141
+
142
+ for chani=1:3
143
+ temp_PWR = squeeze(mean(abs(EEG_conv(chani,T1:T2,idx_V)).^2,3));
144
+
145
+ POWER(chani,fi,:,idx) = 10* ( log10(temp_PWR') - log10(repmat(temp_BASE(chani,:),size(tx2disp,2),1)) );
146
+ ITPC(chani,fi,:,idx) = abs(mean(exp(1i*( angle(EEG_conv(chani,T1:T2,idx_V)) )),3));
147
+ if chani==1, seed=1; targ=2;
148
+ elseif chani==2, seed=1; targ=3;
149
+ elseif chani==3, seed=2; targ=3;
150
+ end
151
+ ISPC(chani,fi,:,idx) = abs(mean(exp(1i*( angle(EEG_conv(seed,T1:T2,idx_V)) - angle(EEG_conv(targ,T1:T2,idx_V)) )),3));
152
+
153
+ clear temp_PWR;
154
+ end
155
+ clear idx_V ;
156
+ end
157
+ clear wavelet idx_V temp_BASE EEG_conv;
158
+ end
159
+
160
+ %%
161
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
162
+ % $$$$$$$$$$$$$$$$$$$$$$$ Theta Topo
163
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
164
+
165
+ topofrex=4.5;
166
+ s=logspace(log10(3),log10(10),num_freqs)./(2*pi*topofrex);
167
+ wavelet = fft( exp(2*1i*pi*frex(fi).*t) .* exp(-t.^2./(2*(s(fi)^2))) , n_conv_pow2 ); % sqrt(1/(s(fi)*sqrt(pi))) *
168
+
169
+ seed=36;
170
+
171
+ EEG_fft_4TOPO = fft(reshape(EEG.data(seed,:,:),1,n_data),n_conv_pow2);
172
+ seed_EEG_conv_4TOPO = ifft(wavelet.*EEG_fft_4TOPO);
173
+ seed_EEG_conv_4TOPO = seed_EEG_conv_4TOPO(1:n_convolution);
174
+ seed_EEG_conv_4TOPO = seed_EEG_conv_4TOPO(half_of_wavelet_size+1:end-half_of_wavelet_size);
175
+ seed_EEG_conv_4TOPO = reshape(seed_EEG_conv_4TOPO,dims(2),dims(3));
176
+ clear EEG_fft_4TOPO ;
177
+
178
+ % Common pre-EEG SEED baseline
179
+ seed_BASE = mean(mean(abs(seed_EEG_conv_4TOPO(B1:B2,:)).^2,1),2);
180
+
181
+ for chani=1:60
182
+
183
+ EEG_fft_4TOPO = fft(reshape(EEG.data(chani,:,:),1,n_data),n_conv_pow2);
184
+ EEG_conv_4TOPO = ifft(wavelet.*EEG_fft_4TOPO);
185
+ EEG_conv_4TOPO = EEG_conv_4TOPO(1:n_convolution);
186
+ EEG_conv_4TOPO = EEG_conv_4TOPO(half_of_wavelet_size+1:end-half_of_wavelet_size);
187
+ EEG_conv_4TOPO = reshape(EEG_conv_4TOPO,dims(2),dims(3));
188
+
189
+ % Common pre-EEG baseline
190
+ temp_BASE = mean(mean(abs(EEG_conv_4TOPO(B1:B2,:)).^2,1),2);
191
+
192
+ for idx=1:3
193
+ if idx==1, idx_V=VECTOR(:,1)==201; % STD
194
+ elseif idx==2, idx_V=VECTOR(:,1)==200; % TARG
195
+ elseif idx==3, idx_V=VECTOR(:,1)==202; % NOV
196
+ end
197
+
198
+ temp_PWR = squeeze(mean(abs(EEG_conv_4TOPO(T1:T2,idx_V)).^2,2));
199
+ POWER_TOPO(chani,:,idx) = 10* ( log10(temp_PWR) - log10(repmat(temp_BASE,size(tx2disp,2),1)) );
200
+
201
+ S4cor=10* ( log10(abs(seed_EEG_conv_4TOPO(T1:T2,idx_V)).^2) - log10(repmat(seed_BASE,size(tx2disp,2),sum(idx_V))) );
202
+ T4cor=10* ( log10(abs(EEG_conv_4TOPO(T1:T2,idx_V)).^2) - log10(repmat(temp_BASE,size(tx2disp,2),sum(idx_V))) );
203
+ CORREL_TOPO(chani,:,idx)= diag(corr(S4cor',T4cor','type','Spearman'));
204
+
205
+ SYNCH_TOPO(chani,:,idx) = abs(mean(exp(1i*( angle(seed_EEG_conv_4TOPO(T1:T2,idx_V)) - angle(EEG_conv_4TOPO(T1:T2,idx_V)) )),2));
206
+
207
+ clear idx_V temp_PWR S4cor T4cor;
208
+ end
209
+
210
+ clear EEG_fft_4TOPO EEG_conv_4TOPO TOPO_conv temp_BASE;
211
+ end
212
+
213
+ %%
214
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
215
+ % $$$$$$$$$$$$$$$$$$$$$$$ ERPs
216
+ % $$$$$$$$$$$$$$$$$$$$$$$ $$$$$$$$$$$$$$$$$$$$$$$
217
+
218
+ % Filter
219
+ dims=size(EEG.data);
220
+ EEG.data=eegfilt(EEG.data,500,[],20);
221
+ EEG.data=eegfiltfft(EEG.data,500,.1,[]);
222
+ EEG.data=reshape(EEG.data,dims(1),dims(2),dims(3));
223
+
224
+ % Basecor your ERPs here so they are pretty.
225
+ EEG_BASE=squeeze( mean(EEG.data(:,find(tx==-200):find(tx==0),:),2) );
226
+ for ai=1:dims(1)
227
+ EEG.data(ai,:,:)=squeeze(EEG.data(ai,:,:))-repmat( EEG_BASE(ai,:),dims(2),1 );
228
+ end
229
+
230
+ % Get ERPs
231
+ for idx=1:3
232
+ if idx==1, idx_V=VECTOR(:,1)==201; % STD
233
+ elseif idx==2, idx_V=VECTOR(:,1)==200; % TARG
234
+ elseif idx==3, idx_V=VECTOR(:,1)==202; % NOV
235
+ end
236
+ ERP(:,:,idx)=squeeze(mean(EEG.data(:,find(tx==-500):find(tx==1000),idx_V),3));
237
+ clear DATA_erp idx_V ;
238
+ end
239
+
240
+ save([savedir,num2str(subno),'_',num2str(sess),'_3AOB_TFandERPs_',TAG,'.mat'],'ERP','ISPC','ITPC','POWER','VECTOR','SYNCH_TOPO','TRL_ct','POWER_TOPO','CORREL_TOPO');
241
+
242
+ clear ERP ISPC ITPC POWER RT;
243
+ end
244
+
245
+ clearvars -except datadir savedir FILEENDER BV_Chanlocs_60 D_DAT D_HDR D_ALL tx B1 B2 T1 T2 tx2disp si sess
246
+ end
247
+ end
248
+ end
249
+
250
+ %%
scripts/s1_Load_Data.m ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ cd(datadir);
3
+
4
+ filz=dir(['*_3AOB_TFandERPs_V.mat']);
5
+ Nsubjs=length(filz);
6
+
7
+ % Load BigAgg_Data
8
+ load('Z:\EXPERIMENTS\mTBICoBRE\MANUSCRIPT 3AOB\BigAgg_Data.mat');
9
+
10
+ % Preallocate
11
+ IDENTITY.DEMO=NaN(Nsubjs,7);
12
+ IDENTITY.TBI=NaN(Nsubjs,9);
13
+ IDENTITY.NP=NaN(Nsubjs,8);
14
+ IDENTITY.QUEX=NaN(Nsubjs,25);
15
+ % ^^^^^^^^^^
16
+ MEGA_PWR=NaN(Nsubjs,3,50,751,3);
17
+ MEGA_PHS=NaN(Nsubjs,3,50,751,3);
18
+ MEGA_SYNCH=NaN(Nsubjs,3,50,751,3);
19
+ MEGA_SYNCH_TOPO=NaN(Nsubjs,60,751,3);
20
+ MEGA_POWER_TOPO=NaN(Nsubjs,60,751,3);
21
+ MEGA_CORREL_TOPO=NaN(Nsubjs,60,751,3);
22
+ MEGA_ERP=NaN(Nsubjs,60,751,3);
23
+ MEGA_TRL_ct=NaN(Nsubjs,3);
24
+
25
+ for si=1:Nsubjs
26
+
27
+ subno = str2double(filz(si).name(1:end-23)) ; % B/C some Quinn ones have 5 digit IDs, some 4
28
+ session = str2double(filz(si).name(end-21)) ;
29
+
30
+ if subno<3500 % Cavanagh
31
+ if mod(subno,2)==1, group=1; % ODD - Ctl
32
+ elseif mod(subno,2)==0,group=2; % EVEN - mTBI
33
+ end
34
+ else % Quinn
35
+ group=3; % Chronic TBI (cTBI)
36
+ end
37
+
38
+ IDENTITY_DEMO_HDR{1}={'subno'}; IDENTITY_DEMO_HDR{2}='session'; IDENTITY_DEMO_HDR{3}='group';
39
+ IDENTITY_TBI_HDR{1}={'subno'}; IDENTITY_TBI_HDR{2}='session'; IDENTITY_TBI_HDR{3}='group';
40
+ IDENTITY_NP_HDR{1}={'subno'}; IDENTITY_NP_HDR{2}='session'; IDENTITY_NP_HDR{3}='group';
41
+ IDENTITY_QUEX_HDR{1}={'subno'}; IDENTITY_QUEX_HDR{2}='session'; IDENTITY_QUEX_HDR{3}='group';
42
+
43
+ IDENTITY.DEMO(si,1:3)=[subno,session,group];
44
+ IDENTITY.TBI(si,1:3)=[subno,session,group];
45
+ IDENTITY.NP(si,1:3)=[subno,session,group];
46
+ IDENTITY.QUEX(si,1:3)=[subno,session,group];
47
+
48
+ % --------------- QUEX
49
+ if group<3 % Cavanagh data
50
+ if any(DEMO.ID(:,1)==subno)
51
+ bigagg_idx=find(DEMO.ID(:,1)==subno);
52
+ IDENTITY.DEMO(si,4)=DEMO.URSI(bigagg_idx,1); IDENTITY_DEMO_HDR{4}='URSI';
53
+ IDENTITY.DEMO(si,5)=DEMO.Sex_F1(bigagg_idx); IDENTITY_DEMO_HDR{5}='SexF1';
54
+ IDENTITY.DEMO(si,6)=DEMO.Age(bigagg_idx); IDENTITY_DEMO_HDR{6}='Age';
55
+ IDENTITY.DEMO(si,7)=DEMO.SES(bigagg_idx); IDENTITY_DEMO_HDR{7}='YrsEd'; % That's what this actually is.
56
+ if session==1
57
+ IDENTITY.TBI(si,4)=TBIfields.Glasgow(bigagg_idx); IDENTITY_TBI_HDR{4}='GCS';
58
+ IDENTITY.TBI(si,5)=TBIfields.LOC(bigagg_idx); IDENTITY_TBI_HDR{5}='LOC';
59
+ IDENTITY.TBI(si,6)=TBIfields.LOCtime(bigagg_idx); IDENTITY_TBI_HDR{6}='LOCtime';
60
+ IDENTITY.TBI(si,7)=TBIfields.LOM(bigagg_idx); IDENTITY_TBI_HDR{7}='LOM';
61
+ IDENTITY.TBI(si,8)=TBIfields.DaysSinceInjury(bigagg_idx); IDENTITY_TBI_HDR{8}='Days';
62
+ % ---------------------
63
+ IDENTITY.NP(si,4)=NP.TOPF_Score(bigagg_idx); IDENTITY_NP_HDR{4}='TOPF';
64
+ IDENTITY.NP(si,5)=NP.Coding(bigagg_idx); IDENTITY_NP_HDR{5}='Coding';
65
+ IDENTITY.NP(si,6)=NP.SPAN.Tot(bigagg_idx); IDENTITY_NP_HDR{6}='Span';
66
+ IDENTITY.NP(si,7)=mean([NP.HVLT.T1(bigagg_idx),NP.HVLT.T2(bigagg_idx),NP.HVLT.T3(bigagg_idx)]'); IDENTITY_NP_HDR{7}='HVLT13';
67
+ IDENTITY.NP(si,8)=NP.HVLT.DelayRecall(bigagg_idx); IDENTITY_NP_HDR{8}='HVLTdelay';
68
+ end
69
+ IDENTITY.QUEX(si,4)=QUEX.BDI(bigagg_idx,session); IDENTITY_QUEX_HDR{4}='BDI';
70
+ IDENTITY.QUEX(si,5)=QUEX.NSI.tot(bigagg_idx,session); IDENTITY_QUEX_HDR{5}='NSItot';
71
+ IDENTITY.QUEX(si,6)=QUEX.NSI.somatic(bigagg_idx,session); IDENTITY_QUEX_HDR{6}='NSIsom';
72
+ IDENTITY.QUEX(si,7)=QUEX.NSI.cog(bigagg_idx,session); IDENTITY_QUEX_HDR{7}='NSIcog';
73
+ IDENTITY.QUEX(si,8)=QUEX.NSI.emo(bigagg_idx,session); IDENTITY_QUEX_HDR{8}='NSIemo';
74
+ IDENTITY.QUEX(si,9)=QUEX.FRSBE.Tot_B4(bigagg_idx,session); IDENTITY_QUEX_HDR{9}='F_Tot_B4';
75
+ IDENTITY.QUEX(si,10)=QUEX.FRSBE.Tot_Now(bigagg_idx,session); IDENTITY_QUEX_HDR{10}='F_Tot';
76
+ IDENTITY.QUEX(si,11)=EX.EX(bigagg_idx,session); IDENTITY_QUEX_HDR{11}='EX_EX';
77
+ IDENTITY.QUEX(si,12)=EX.CC(bigagg_idx,session); IDENTITY_QUEX_HDR{12}='EX_CC';
78
+ IDENTITY.QUEX(si,13)=EX.FL(bigagg_idx,session); IDENTITY_QUEX_HDR{13}='EX_FL';
79
+ IDENTITY.QUEX(si,14)=EX.WM(bigagg_idx,session); IDENTITY_QUEX_HDR{14}='EX_WM';
80
+ end
81
+ elseif group==3
82
+ if any(Q_DEMO.URSI==subno)
83
+ bigagg_idx=find(Q_DEMO.URSI==subno);
84
+ %^^^^^^^^^^^
85
+ IDENTITY.DEMO(si,4)=Q_DEMO.URSI(bigagg_idx,1);
86
+ IDENTITY.DEMO(si,5)=Q_DEMO.Sex_F1(bigagg_idx);
87
+ IDENTITY.DEMO(si,6)=Q_DEMO.Age(bigagg_idx);
88
+ IDENTITY.DEMO(si,7)=Q_DEMO.SES(bigagg_idx);
89
+ % ---------------------
90
+ IDENTITY.TBI(si,6)=Q_TBIfields.LOCdurMINS(bigagg_idx);
91
+ IDENTITY.TBI(si,9)=Q_TBIfields.YearsSinceInjury(bigagg_idx); IDENTITY_TBI_HDR{9}='Years';
92
+ % ---------------------
93
+ IDENTITY.NP(si,4)=Q_NP.TOPF_Score(bigagg_idx);
94
+ IDENTITY.NP(si,5)=Q_NP.Coding(bigagg_idx);
95
+ IDENTITY.NP(si,6)=Q_NP.SPAN.Tot(bigagg_idx);
96
+ IDENTITY.NP(si,7)=mean([Q_NP.HVLT.T1(bigagg_idx),Q_NP.HVLT.T2(bigagg_idx),Q_NP.HVLT.T3(bigagg_idx)]');
97
+ IDENTITY.NP(si,8)=Q_NP.HVLT.DelayRecall(bigagg_idx);
98
+ % ---------------------
99
+ IDENTITY.QUEX(si,4)=Q_QUEX.BDI(bigagg_idx);
100
+ IDENTITY.QUEX(si,5)=Q_QUEX.NSI.tot(bigagg_idx);
101
+ IDENTITY.QUEX(si,6)=Q_QUEX.NSI.somatic(bigagg_idx);
102
+ IDENTITY.QUEX(si,7)=Q_QUEX.NSI.cog(bigagg_idx);
103
+ IDENTITY.QUEX(si,8)=Q_QUEX.NSI.emo(bigagg_idx);
104
+ IDENTITY.QUEX(si,9)=Q_QUEX.FRSBE.RAW_Tot_Now(bigagg_idx);
105
+ IDENTITY.QUEX(si,10)=Q_QUEX.FRSBE.Tot_Now(bigagg_idx);
106
+ end
107
+ end
108
+ clear bigagg_idx
109
+
110
+ % EEG
111
+ load([filz(si).name(1:end-5),'V.mat'],'ERP','TRL_ct');
112
+ MEGA_ERP(si,:,:,:)=ERP;
113
+ MEGA_TRL_ct(si,:)=TRL_ct;
114
+
115
+ clear ERP ISPC ITPC POWER VECTOR RT SYNCH_TOPO TRL_ct subno session group BEH ACC RT POWER_TOPO CORREL_TOPO;
116
+
117
+ end
118
+
119
+ clear DEMO QUEX NP EX TBIfields Q_*
120
+
121
+ cd(homedir);
scripts/s2_Kill_Data.m ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ % Kill Quinn S2
2
+ for si=1:length(IDENTITY.DEMO)
3
+ if IDENTITY.DEMO(si,3)==3 && IDENTITY.DEMO(si,2)==2
4
+ IDENTITY.DEMO(si,:)=NaN; IDENTITY.TBI(si,:)=NaN; IDENTITY.NP(si,:)=NaN; IDENTITY.QUEX(si,:)=NaN;
5
+ % % MEGA_PWR(si,:,:,:,:)=NaN;
6
+ % % MEGA_PHS(si,:,:,:,:)=NaN;
7
+ % % MEGA_SYNCH(si,:,:,:,:)=NaN;
8
+ % % MEGA_SYNCH_TOPO(si,:,:,:)=NaN;
9
+ MEGA_ERP(si,:,:,:)=NaN;
10
+ end
11
+ end
12
+
13
+ % Kill malingering Quinn patient # 43047 (is already excluded... but this will make sure!)
14
+ if any(IDENTITY.DEMO(:,1)==43047); BOOM; end
15
+
16
+ % Kill any Quinn patients who were Cavanagh patients
17
+ % F48 3032(Cav URSI: 30454; Quinn URSI: 35957) & F22 3004(Cav URSI: 69117; Quinn URSI: 48880)
18
+ badidx=find(IDENTITY.DEMO(:,1)==35957);
19
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
20
+ % The other was 3004, who is killed below due to no LOC
21
+
22
+ % Kill 2 mTBI with out LOC 3004, 3056 == no LOC
23
+ badidx=find(IDENTITY.DEMO(:,1)==3004);
24
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
25
+ badidx=find(IDENTITY.DEMO(:,1)==3056);
26
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
27
+
28
+ % Kill people with pre-existing head injuries
29
+ badidx=find(IDENTITY.DEMO(:,1)==3024);
30
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
31
+
32
+ % Kill *sessions* if they had an intervening head injury
33
+ for si=1:length(IDENTITY.DEMO)
34
+ if IDENTITY.DEMO(si,1)==3034 && IDENTITY.DEMO(si,2)==3
35
+ badidx=si;
36
+ end
37
+ end
38
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
39
+
40
+ % Kill people with TOMM score<45
41
+ badidx=find(IDENTITY.DEMO(:,1)==14000);
42
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
43
+
44
+ % Assessment > 2 weeks
45
+ badidx=find(IDENTITY.TBI(:,8)>14);
46
+ IDENTITY.DEMO(badidx,:)=NaN; IDENTITY.TBI(badidx,:)=NaN; IDENTITY.NP(badidx,:)=NaN; IDENTITY.QUEX(badidx,:)=NaN; clear badidx;
47
+
48
+
49
+
50
+
51
+
52
+
53
+
54
+
scripts/s3_Demographics.m ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ % #######################################################################################################
2
+
3
+ % Count
4
+ for groupi=1:2
5
+ for time=1:3
6
+ Sx=logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==groupi) );
7
+ TABLE1_COUNT(groupi,time,:)=[sum(Sx),nansum(IDENTITY.DEMO(Sx,5))];
8
+ end
9
+ end
10
+
11
+ % Other neat stuff
12
+ for groupi=1:3
13
+ time=1;
14
+ Sx=logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==groupi) );
15
+ TABLE1_VARS(1,groupi,:)=[mean(IDENTITY.DEMO(Sx,6)),std(IDENTITY.DEMO(Sx,6))]; % Age
16
+ TABLE1_VARS(2,groupi,:)=[mean(IDENTITY.DEMO(Sx,7)),std(IDENTITY.DEMO(Sx,7))]; % YrsEd
17
+ for npi=1:5
18
+ TABLE1_VARS(2+npi,groupi,:)=[nanmean(IDENTITY.NP(Sx,3+npi)),nanstd(IDENTITY.NP(Sx,3+npi))];
19
+ end
20
+ end
21
+ CTL_Sx=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==1) );
22
+ Acute_Sx=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==2) );
23
+ Chronic_Sx=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==3) );
24
+
25
+ [~,T1_P,~,T1_STATS]=ttest2([IDENTITY.DEMO(CTL_Sx,[6,7]),IDENTITY.NP(CTL_Sx,4:8)],[IDENTITY.DEMO(Acute_Sx,[6,7]),IDENTITY.NP(Acute_Sx,4:8)])
26
+ [~,T1_P,~,T1_STATS]=ttest2([IDENTITY.DEMO(CTL_Sx,[6,7]),IDENTITY.NP(CTL_Sx,4:8)],[IDENTITY.DEMO(Chronic_Sx,[6,7]),IDENTITY.NP(Chronic_Sx,4:8)])
27
+
28
+ % TABLE 2
29
+ IDENTITY.TBI(Acute_Sx,4) % GCS
30
+ nanmedian(IDENTITY.TBI(Acute_Sx,6)) % LOCmins median
31
+ iqr(IDENTITY.TBI(Acute_Sx,6)) % LOCmins iqr
32
+ nansum(IDENTITY.TBI(Acute_Sx,7))
33
+ nanmedian(IDENTITY.TBI(Acute_Sx,8)) % Days median
34
+ iqr(IDENTITY.TBI(Acute_Sx,8)) % Days iqr
35
+
36
+ nanmedian(IDENTITY.TBI(Chronic_Sx,6)) % LOCmins median
37
+ iqr(IDENTITY.TBI(Chronic_Sx,6)) % LOCmins iqr
38
+ nansum(IDENTITY.TBI(Chronic_Sx,7)) % Data not here
39
+ nanmedian(IDENTITY.TBI(Chronic_Sx,9)) % Days median
40
+ iqr(IDENTITY.TBI(Chronic_Sx,9)) % Days iqr
41
+
42
+ %% ###########################################################################
43
+ clear INTERCOR_Rho INTERCOR_P;
44
+ INDEX=[find(strcmp('BDI',IDENTITY_QUEX_HDR)),find(strcmp('NSItot',IDENTITY_QUEX_HDR)),find(strcmp('F_Tot_B4',IDENTITY_QUEX_HDR)),find(strcmp('F_Tot',IDENTITY_QUEX_HDR))];
45
+ COL={'bd','rd','md'};
46
+ SHIFT=[-.05,.05,0];
47
+ figure; hold on;
48
+ for groupi=1:2
49
+ for timei=1:3
50
+ Sx=logical( double(IDENTITY.DEMO(:,2)==timei) .* double(IDENTITY.DEMO(:,3)==groupi) );
51
+ for idxi=1:4
52
+ subplot(2,2,idxi); hold on;
53
+ ThisN=sum(~isnan(IDENTITY.QUEX(Sx,INDEX(idxi))));
54
+ plot(timei+SHIFT(groupi),nanmean(IDENTITY.QUEX(Sx,INDEX(idxi))),COL{groupi});
55
+ errorbar(timei+SHIFT(groupi),nanmean(IDENTITY.QUEX(Sx,INDEX(idxi))),nanstd(IDENTITY.QUEX(Sx,INDEX(idxi)))./sqrt(ThisN),'k.');
56
+ set(gca,'xlim',[0 4],'xtick',[1:1:3]);
57
+ title(IDENTITY_QUEX_HDR{INDEX(idxi)});
58
+ % ---
59
+ Sx1=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==groupi) );
60
+ Sx2=logical( double(IDENTITY.DEMO(:,2)==2) .* double(IDENTITY.DEMO(:,3)==groupi) );
61
+ Sx3=logical( double(IDENTITY.DEMO(:,2)==3) .* double(IDENTITY.DEMO(:,3)==groupi) );
62
+ plot([1+SHIFT(groupi) 2+SHIFT(groupi)],[nanmean(IDENTITY.QUEX(Sx1,INDEX(idxi))), nanmean(IDENTITY.QUEX(Sx2,INDEX(idxi)))],'k-');
63
+ plot([2+SHIFT(groupi) 3+SHIFT(groupi)],[nanmean(IDENTITY.QUEX(Sx2,INDEX(idxi))), nanmean(IDENTITY.QUEX(Sx3,INDEX(idxi)))],'k-');
64
+ % ---
65
+ end
66
+ if time==1
67
+ [INTERCOR_Rho{groupi},INTERCOR_P{groupi}]=corr(IDENTITY.QUEX(Sx,[4,5,10]),'type','Spearman','rows','pairwise');
68
+ end
69
+ end
70
+ end
71
+ groupi=3; timei=1;
72
+ Sx=logical( double(IDENTITY.DEMO(:,2)==timei) .* double(IDENTITY.DEMO(:,3)==groupi) );
73
+ for idxi=1:4
74
+ subplot(2,2,idxi); hold on;
75
+ ThisN=sum(~isnan(IDENTITY.QUEX(Sx,INDEX(idxi))));
76
+ plot(timei+SHIFT(groupi),nanmean(IDENTITY.QUEX(Sx,INDEX(idxi))),COL{groupi});
77
+ errorbar(timei+SHIFT(groupi),nanmean(IDENTITY.QUEX(Sx,INDEX(idxi))),nanstd(IDENTITY.QUEX(Sx,INDEX(idxi)))./sqrt(ThisN),'k.');
78
+ set(gca,'xlim',[0 4],'xtick',[1:1:3]);
79
+ title(IDENTITY_QUEX_HDR{INDEX(idxi)});
80
+
81
+
82
+ [INTERCOR_Rho{3},INTERCOR_P{3}]=corr(IDENTITY.QUEX(Sx,[4,5,10]),'type','Spearman','rows','pairwise');
83
+ end
84
+
85
+
86
+ %%
87
+
88
+
scripts/s4_Example_ERPs.m ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %% Example P3a, P3b
2
+
3
+ V = logical( double(IDENTITY.DEMO(:,2)<4) );
4
+ SIZEALL=sum( V );
5
+
6
+ figure; subplot(3,3,[1:6]); hold on
7
+ plot(tx2disp,squeeze(nanmean( MEGA_ERP(V,NovSite,:,1) ,1)),'k','Linewidth',2);
8
+ plot(tx2disp,squeeze(nanmean( MEGA_ERP(V,TargSite,:,2) ,1)),'g','Linewidth',2);
9
+ plot(tx2disp,squeeze(nanmean( MEGA_ERP(V,StdSite,:,3) ,1)),'c','Linewidth',2);
10
+ shadedErrorBar(tx2disp,squeeze(nanmean( MEGA_ERP(V,NovSite,:,1) ,1)),...
11
+ squeeze(nanstd( MEGA_ERP(V,NovSite,:,1) ,1)) ./ sqrt(SIZEALL),'k');
12
+ shadedErrorBar(tx2disp,squeeze(nanmean( MEGA_ERP(V,TargSite,:,2) ,1)),...
13
+ squeeze(nanstd( MEGA_ERP(V,TargSite,:,2) ,1)) ./ sqrt(SIZEALL),'g');
14
+ shadedErrorBar(tx2disp,squeeze(nanmean( MEGA_ERP(V,StdSite,:,3) ,1)),...
15
+ squeeze(nanstd( MEGA_ERP(V,StdSite,:,3) ,1)) ./ sqrt(SIZEALL),'c');
16
+ legend({'Std','Targ','Nov'},'Location','NorthWest');
17
+ title(['N DataSets =',num2str(SIZEALL)])
18
+
19
+ plot([0 0],[-6 6],'k:'); plot([-500 1000],[0 0],'k:');
20
+ plot([NovT1 NovT1],[4 5],'m:'); plot([NovT2 NovT2],[4 5],'m:');
21
+ plot([TargT1 TargT1],[5.1 6],'r:'); plot([TargT2 TargT2],[5.1 6],'r:');
22
+
23
+ MAPLIMS=[-5 5];
24
+
25
+ subplot(3,3,7); topoplot( squeeze(nanmean(mean( MEGA_ERP(V,:,ERPWINS_tx2disp(1,1):ERPWINS_tx2disp(1,2),1) ,3) ,1)) ,...
26
+ BV_Chanlocs_60,'maplimits',MAPLIMS,'emarker2',{StdSite,'d','k'}); title('Std')
27
+ subplot(3,3,8); topoplot( squeeze(nanmean(mean( MEGA_ERP(V,:,ERPWINS_tx2disp(2,1):ERPWINS_tx2disp(2,2),2) ,3) ,1)) ,...
28
+ BV_Chanlocs_60,'maplimits',MAPLIMS,'emarker2',{TargSite,'d','k'}); title('Targ')
29
+ subplot(3,3,9); topoplot( squeeze(nanmean(mean( MEGA_ERP(V,:,ERPWINS_tx2disp(3,1):ERPWINS_tx2disp(3,2),3) ,3) ,1)) ,...
30
+ BV_Chanlocs_60,'maplimits',MAPLIMS,'emarker2',{NovSite,'d','k'}); title('Nov')
scripts/s5_ERPs_by_Group.m ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %%
2
+
3
+ TITLES={'Std','Targ','Nov'};
4
+ COL={'b','r','m'};
5
+ YLIM=[-6 8];
6
+
7
+
8
+ figure;
9
+ for ai=1:3
10
+ subplot(3,1,ai); hold on;
11
+ for gi=1:3
12
+ V = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==gi) );
13
+ plot(tx2disp,squeeze(nanmean( MEGA_ERP(V,ERPSITE(ai),:,ai) ,1)),COL{gi},'Linewidth',2);
14
+ clear V;
15
+ end
16
+ for gi=1:3
17
+ V = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==gi) );
18
+ shadedErrorBar(tx2disp,squeeze(nanmean( MEGA_ERP(V,ERPSITE(ai),:,ai) ,1)),squeeze(nanstd( MEGA_ERP(V,ERPSITE(ai),:,ai) ,1))./sqrt(sum(V)),COL{gi});
19
+ clear V;
20
+ end
21
+ for gi=1:3
22
+ V = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==gi) );
23
+ plot(tx2disp,squeeze(nanmean( MEGA_ERP(V,ERPSITE(ai),:,ai) ,1)),COL{gi},'Linewidth',2);
24
+ set(gca,'ylim',YLIM);
25
+ clear V;
26
+ end
27
+
28
+ plot([ERPWINS(ai,1) ERPWINS(ai,1)],[5 7],'k:'); plot([ERPWINS(ai,2) ERPWINS(ai,2)],[5 7],'k:');
29
+
30
+ title(TITLES{ai});
31
+
32
+ V_ctl = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==1) );
33
+ V_acute = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==2) );
34
+ V_chronic = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==3) );
35
+ ERPs_ctl=squeeze(MEGA_ERP(V_ctl,ERPSITE(ai),:,ai));
36
+ ERPs_acute=squeeze(MEGA_ERP(V_acute,ERPSITE(ai),:,ai));
37
+ ERPs_chronic=squeeze(MEGA_ERP(V_chronic,ERPSITE(ai),:,ai));
38
+ [H,P,CI,STATS]=ttest2(ERPs_ctl,ERPs_acute); P(P>.05)=NaN; P(P<=.05)=1;
39
+ plot(tx2disp,-4.*P,'r'); clear H P CI STATS TEMP*;
40
+ [H,P,CI,STATS]=ttest2(ERPs_ctl,ERPs_chronic); P(P>.05)=NaN; P(P<=.05)=1;
41
+ plot(tx2disp,-4.5.*P,'m'); clear H P CI STATS TEMP*;
42
+
43
+ clear V_ctl V_acute V_chronic ERPs_ctl ERPs_acute ERPs_chronic
44
+
45
+ end
46
+ legend({'CTL','Acute','Chronic'},'Location','NorthWest')
47
+
48
+
49
+ %%
50
+
51
+ figure;
52
+ for ai=1:3 % Condi
53
+ subplot(3,1,ai); hold on;
54
+ for gi=1:3 % Group
55
+ for time=1:3
56
+ V = logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==gi) );
57
+ plot(time,squeeze(nanmean(mean( MEGA_ERP(V,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1)),[COL{gi},'d']);
58
+ errorbar(time,squeeze(nanmean(mean( MEGA_ERP(V,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1)),...
59
+ squeeze(nanstd(mean( MEGA_ERP(V,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1)) ./sqrt(sum(V)),'k.');
60
+ BIG_OL_N(gi,time)=sum(V);
61
+
62
+ clear V;
63
+ end
64
+
65
+ % ---
66
+ V1 = logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==gi) );
67
+ V2 = logical( double(IDENTITY.DEMO(:,2)==2) .* double(IDENTITY.DEMO(:,3)==gi) );
68
+ V3 = logical( double(IDENTITY.DEMO(:,2)==3) .* double(IDENTITY.DEMO(:,3)==gi) );
69
+ plot([1 2],[squeeze(nanmean(mean( MEGA_ERP(V1,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1)) ,...
70
+ squeeze(nanmean(mean( MEGA_ERP(V2,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1))],'k-');
71
+ plot([2 3],[squeeze(nanmean(mean( MEGA_ERP(V2,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1)) ,...
72
+ squeeze(nanmean(mean( MEGA_ERP(V3,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3),1))],'k-');
73
+ clear V*;
74
+ % ---
75
+
76
+ end
77
+
78
+ set(gca,'xlim',[0 4],'xtick',[1:1:3])
79
+ title(TITLES{ai});
80
+ end
81
+
82
+ %%
83
+
84
+ STATS{1}=[]; STATS{2}=[]; % Targ, Nov
85
+ for si=1:2 % CTL, Acute
86
+ Sx=logical( double(IDENTITY.DEMO(:,3)==si) );
87
+ Sx_idxs=unique(IDENTITY.DEMO(Sx,1));
88
+ for sxi=1:length(Sx_idxs)
89
+ thisguy=Sx_idxs(sxi);
90
+
91
+ FIRST=[]; SECOND=[]; THIRD=[];
92
+
93
+ FIRST=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==1) ));
94
+ SECOND=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==2) ));
95
+ THIRD=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==3) ));
96
+
97
+ ai=2;
98
+ ERP1=NaN; ERP2=NaN; ERP3=NaN;
99
+ if ~isempty(FIRST), ERP1=mean( MEGA_ERP(FIRST,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
100
+ if ~isempty(SECOND), ERP2=mean( MEGA_ERP(SECOND,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
101
+ if ~isempty(THIRD), ERP3=mean( MEGA_ERP(THIRD,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
102
+ STATS{ai-1}=[STATS{ai-1};thisguy,si,ERP1,ERP2,ERP3];
103
+
104
+ ai=3;
105
+ ERP1=NaN; ERP2=NaN; ERP3=NaN;
106
+ if ~isempty(FIRST), ERP1=mean( MEGA_ERP(FIRST,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
107
+ if ~isempty(SECOND), ERP2=mean( MEGA_ERP(SECOND,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
108
+ if ~isempty(THIRD), ERP3=mean( MEGA_ERP(THIRD,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
109
+ STATS{ai-1}=[STATS{ai-1};thisguy,si,ERP1,ERP2,ERP3];
110
+
111
+ clear thisguy;
112
+ end
113
+ clear Sx Sx_idxs;
114
+ end
115
+
116
+ si=3; clear Sx Sx_idxs; % chronic
117
+ Sx=logical( double(IDENTITY.DEMO(:,3)==si) );
118
+ Sx_idxs=unique(IDENTITY.DEMO(Sx,1));
119
+ for sxi=1:length(Sx_idxs)
120
+
121
+ thisguy=Sx_idxs(sxi);
122
+ FIRST=[];
123
+ FIRST=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==1) ));
124
+
125
+ ai=2;
126
+ ERP1=NaN;
127
+ if ~isempty(FIRST), ERP1=mean( MEGA_ERP(FIRST,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
128
+ STATS{ai-1}=[STATS{ai-1};thisguy,si,ERP1,NaN,NaN];
129
+
130
+ ai=3;
131
+ ERP1=NaN;
132
+ if ~isempty(FIRST), ERP1=mean( MEGA_ERP(FIRST,ERPSITE(ai),ERPWINS_tx2disp(ai,1):ERPWINS_tx2disp(ai,2),ai) ,3); end
133
+ STATS{ai-1}=[STATS{ai-1};thisguy,si,ERP1,NaN,NaN];
134
+
135
+ clear thisguy;
136
+ end
137
+
scripts/s6_Correlations.m ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %%
2
+
3
+ COLS={'b','r','m'};
4
+ figure;
5
+ for si=1:3
6
+
7
+ Sx=logical( double(IDENTITY.DEMO(:,2)==time) .* double(IDENTITY.DEMO(:,3)==si) );
8
+
9
+ % --------------------
10
+
11
+ IV=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
12
+
13
+ [rho,rho_p]=corr(IV,DV(Sx),'type','Spearman','rows','pairwise');
14
+ [r,p]=corr(IV,DV(Sx),'type','Pearson','rows','pairwise');
15
+ subplot(2,3,si); hold on
16
+ scatter(IV,DV(Sx),COLS{si}); lsline
17
+ set(gca,'xlim',[-10 20],'ylim',[20 120]);
18
+ % text(.1,.7,['df=',num2str(sum(logical(double(~isnan(IV)).*double(~isnan(DV(Sx))))) -2 ),' r=',num2str(r),' p=',num2str(p)],'sc');
19
+ text(.1,.6,['df=',num2str(sum(logical(double(~isnan(IV)).*double(~isnan(DV(Sx))))) -2 ),' rho=',num2str(rho),' p=',num2str(rho_p)],'sc');
20
+ clear rho rho_p r p IV
21
+ title( BV_Chanlocs_60(ERPSITE(CONDI4Corr)).labels );
22
+
23
+ % --------------------
24
+
25
+ IV=squeeze(mean(MEGA_ERP(Sx,:,ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
26
+
27
+ [rho,rho_p]=corr(IV,DV(Sx),'type','Spearman','rows','pairwise');
28
+ subplot(2,3,3+si); hold on
29
+ rho_p(rho_p>=.05)=NaN; rho_p(rho_p<.05)=1; rho_p(isnan(rho_p))=0;
30
+ topoplot(rho,BV_Chanlocs_60,'emarker2',{find(rho_p==1),'d','k',10,1});
31
+ clear rho rhop_p IV;
32
+
33
+ % --------------------
34
+
35
+ end
36
+
scripts/s6_Correlations_S1EEG_With_FrSBediffs.m ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %%
2
+ COLS={'c','m'};
3
+ ROWS={'1-2','1-3','2-3'};
4
+ figure;
5
+ for si=1:2
6
+ Sx=logical( double(IDENTITY.DEMO(:,3)==si) );
7
+ Sx_idxs=unique(IDENTITY.DEMO(Sx,1));
8
+
9
+ clear IV* DV* *12 *23 *13;
10
+ for sxi=1:length(Sx_idxs)
11
+ thisguy=Sx_idxs(sxi);
12
+
13
+ FIRST=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==1) ));
14
+ SECOND=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==2) ));
15
+ THIRD=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==3) ));
16
+
17
+ if ~isempty(FIRST) && ~isempty(SECOND)
18
+ IVs12(sxi,:)= squeeze(mean(MEGA_ERP(FIRST,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3)) ;
19
+ DVs12(sxi,:)=IDENTITY.QUEX(SECOND,quexidx)-IDENTITY.QUEX(FIRST,quexidx);
20
+ age12(sxi,:)=IDENTITY.QUEX(FIRST,6);
21
+ TOPF12(sxi,:)=IDENTITY.NP(FIRST,4);
22
+ sex12(sxi,:)=IDENTITY.DEMO(FIRST,5);
23
+ % NP vars that also predicted dropout
24
+ Span12(sxi,:)=IDENTITY.NP(FIRST,6);
25
+ Coding12(sxi,:)=IDENTITY.NP(FIRST,5);
26
+ else
27
+ IVs12(sxi,:)=NaN;
28
+ DVs12(sxi,:)=NaN;
29
+ age12(sxi,:)=NaN;
30
+ TOPF12(sxi,:)=NaN;
31
+ sex12(sxi,:)=NaN;
32
+ end
33
+
34
+ if ~isempty(SECOND) && ~isempty(THIRD)
35
+ IVs23(sxi,:)= squeeze(mean(MEGA_ERP(SECOND,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3)) ;
36
+ DVs23(sxi,:)=IDENTITY.QUEX(THIRD,quexidx)-IDENTITY.QUEX(SECOND,quexidx);
37
+ else
38
+ IVs23(sxi,:)=NaN;
39
+ DVs23(sxi,:)=NaN;
40
+ end
41
+
42
+ if ~isempty(FIRST) && ~isempty(THIRD)
43
+ IVs13(sxi,:)= squeeze(mean(MEGA_ERP(FIRST,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3)) ;
44
+ DVs13(sxi,:)=IDENTITY.QUEX(THIRD,quexidx)-IDENTITY.QUEX(FIRST,quexidx);
45
+ else
46
+ IVs13(sxi,:)=NaN;
47
+ DVs13(sxi,:)=NaN;
48
+ end
49
+
50
+ end
51
+
52
+ % --------------------
53
+
54
+ [rho,rho_p]=corr(IVs12,DVs12,'type','Spearman','rows','pairwise');
55
+ [r,p]=corr(IVs12,DVs12,'type','Pearson','rows','pairwise');
56
+ subplot(3,2,si); hold on
57
+ scatter(IVs12,DVs12,COLS{si}); lsline
58
+ set(gca,'xlim',[-10 20],'ylim',[-40 40]);
59
+ text(.1,.7,['df=',num2str(sum(logical(double(~isnan(IVs12)).*double(~isnan(DVs12)))) -2 ),' r=',num2str(r),' p=',num2str(p)],'sc');
60
+ text(.1,.6,['df=',num2str(sum(logical(double(~isnan(IVs12)).*double(~isnan(DVs12)))) -2 ),' rho=',num2str(rho),' p=',num2str(rho_p)],'sc');
61
+ clear rho rho_p r p IV
62
+ title( [BV_Chanlocs_60(ERPSITE(CONDI4Corr)).labels,' S1EEG ',ROWS{1}] );
63
+
64
+ % --------------------
65
+
66
+ [rho,rho_p]=corr(IVs13,DVs13,'type','Spearman','rows','pairwise');
67
+ [r,p]=corr(IVs13,DVs13,'type','Pearson','rows','pairwise');
68
+ subplot(3,2,si+2); hold on
69
+ scatter(IVs13,DVs13,COLS{si}); lsline
70
+ set(gca,'xlim',[-10 20],'ylim',[-40 40]);
71
+ text(.1,.7,['df=',num2str(sum(logical(double(~isnan(IVs13)).*double(~isnan(DVs13)))) -2 ),' r=',num2str(r),' p=',num2str(p)],'sc');
72
+ text(.1,.6,['df=',num2str(sum(logical(double(~isnan(IVs13)).*double(~isnan(DVs13)))) -2 ),' rho=',num2str(rho),' p=',num2str(rho_p)],'sc');
73
+ clear rho rho_p r p IV
74
+ title( [BV_Chanlocs_60(ERPSITE(CONDI4Corr)).labels,' S1EEG ',ROWS{2}] );
75
+
76
+ % --------------------
77
+
78
+ [rho,rho_p]=corr(IVs23,DVs23,'type','Spearman','rows','pairwise');
79
+ [r,p]=corr(IVs23,DVs23,'type','Pearson','rows','pairwise');
80
+ subplot(3,2,si+4); hold on
81
+ scatter(IVs23,DVs23,COLS{si}); lsline
82
+ set(gca,'xlim',[-10 20],'ylim',[-40 40]);
83
+ text(.1,.7,['df=',num2str(sum(logical(double(~isnan(IVs23)).*double(~isnan(DVs23)))) -2 ),' r=',num2str(r),' p=',num2str(p)],'sc');
84
+ text(.1,.6,['df=',num2str(sum(logical(double(~isnan(IVs23)).*double(~isnan(DVs23)))) -2 ),' rho=',num2str(rho),' p=',num2str(rho_p)],'sc');
85
+ clear rho rho_p r p IV
86
+ title( [BV_Chanlocs_60(ERPSITE(CONDI4Corr)).labels,' S1EEG ',ROWS{3}] );
87
+
88
+ % --------------------
89
+
90
+ end
91
+
92
+
93
+ % Check demographic (S1) vars in the sub-acute group on FrSBe change
94
+
95
+ [r,p]=corr(DVs12,age12,'type','Spearman','rows','pairwise')
96
+
97
+ [r,p]=corr(DVs12,TOPF12,'type','Spearman','rows','pairwise')
98
+
99
+ [H,P,CI,STATS]=ttest2(DVs12(sex12==1),DVs12(sex12==0))
100
+
101
+ [r,p]=corr(DVs12,Span12,'type','Spearman','rows','pairwise')
102
+ [r,p]=corr(DVs12,Coding12,'type','Spearman','rows','pairwise')
103
+
104
+
105
+
106
+
107
+ [r,p]=corr(DVs13,age12,'type','Spearman','rows','pairwise')
108
+ [r,p]=corr(DVs13,TOPF12,'type','Spearman','rows','pairwise')
109
+ [H,P,CI,STATS]=ttest2(DVs13(sex12==1),DVs13(sex12==0))
110
+ [r,p]=corr(DVs13,Span12,'type','Spearman','rows','pairwise')
111
+ [r,p]=corr(DVs13,Coding12,'type','Spearman','rows','pairwise')
scripts/s6_FOR_SPSS.m ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ for CONDI4Corr=2:3; % Std, Targ, Nov
2
+ IDENTITY.ERP(:,CONDI4Corr-1)=squeeze(mean(MEGA_ERP(:,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
3
+ end
4
+
5
+ UNIQUE_SX=unique(IDENTITY.DEMO(~isnan(IDENTITY.DEMO(:,1)),1));
6
+ for sxi=1:length(UNIQUE_SX)
7
+ thisguy=UNIQUE_SX(sxi);
8
+
9
+ FIRST=[]; SECOND=[]; THIRD=[];
10
+
11
+ FIRST=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==1) ));
12
+ SECOND=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==2) ));
13
+ THIRD=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==3) ));
14
+
15
+
16
+ % for standard models
17
+ if ~isempty(FIRST) % B/C of bad EEG
18
+ FORSPSS(sxi,1)=IDENTITY.DEMO(FIRST,1); FORSPSS_HDR{1}='subno';
19
+ FORSPSS(sxi,2)=IDENTITY.DEMO(FIRST,find(strcmp('session',IDENTITY_DEMO_HDR))); FORSPSS_HDR{2}='session';
20
+ FORSPSS(sxi,3)=IDENTITY.DEMO(FIRST,find(strcmp('group',IDENTITY_DEMO_HDR))); FORSPSS_HDR{3}='group';
21
+ FORSPSS(sxi,4)=IDENTITY.DEMO(FIRST,find(strcmp('SexF1',IDENTITY_DEMO_HDR))); FORSPSS_HDR{4}='SexF1';
22
+ FORSPSS(sxi,5)=IDENTITY.DEMO(FIRST,find(strcmp('Age',IDENTITY_DEMO_HDR))); FORSPSS_HDR{5}='Age';
23
+ FORSPSS(sxi,6)=IDENTITY.NP(FIRST,find(strcmp('TOPF',IDENTITY_NP_HDR))); FORSPSS_HDR{6}='TOPF';
24
+ FORSPSS(sxi,7)=IDENTITY.TBI(FIRST,find(strcmp('Days',IDENTITY_TBI_HDR))); FORSPSS_HDR{7}='Days';
25
+
26
+ FORSPSS(sxi,8)=IDENTITY.QUEX(FIRST,find(strcmp('BDI',IDENTITY_QUEX_HDR))); FORSPSS_HDR{8}='BDI_1';
27
+ FORSPSS(sxi,9)=IDENTITY.QUEX(FIRST,find(strcmp('NSItot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{9}='NSI_1';
28
+ FORSPSS(sxi,10)=IDENTITY.QUEX(FIRST,find(strcmp('F_Tot_B4',IDENTITY_QUEX_HDR))); FORSPSS_HDR{10}='F_B4_1';
29
+ FORSPSS(sxi,11)=IDENTITY.QUEX(FIRST,find(strcmp('F_Tot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{11}='F_Tot_1';
30
+
31
+ FORSPSS(sxi,12)=IDENTITY.ERP(FIRST,1); FORSPSS_HDR{12}='P3b_1';
32
+ FORSPSS(sxi,13)=IDENTITY.ERP(FIRST,2); FORSPSS_HDR{13}='P3a_1';
33
+ else
34
+ FORSPSS(sxi,1)=IDENTITY.DEMO(SECOND,1);
35
+ FORSPSS(sxi,2)=IDENTITY.DEMO(SECOND,find(strcmp('session',IDENTITY_DEMO_HDR)));
36
+ FORSPSS(sxi,3)=IDENTITY.DEMO(SECOND,find(strcmp('group',IDENTITY_DEMO_HDR)));
37
+ FORSPSS(sxi,4)=IDENTITY.DEMO(SECOND,find(strcmp('SexF1',IDENTITY_DEMO_HDR)));
38
+ FORSPSS(sxi,5)=IDENTITY.DEMO(SECOND,find(strcmp('Age',IDENTITY_DEMO_HDR)));
39
+ FORSPSS(sxi,6)=NaN;
40
+ FORSPSS(sxi,7)=NaN;
41
+
42
+ FORSPSS(sxi,8)=NaN;
43
+ FORSPSS(sxi,9)=NaN;
44
+ FORSPSS(sxi,10)=NaN;
45
+ FORSPSS(sxi,11)=NaN;
46
+
47
+ FORSPSS(sxi,12)=NaN;
48
+ FORSPSS(sxi,13)=NaN;
49
+ end
50
+
51
+ if ~isempty(SECOND)
52
+ FORSPSS(sxi,14)=IDENTITY.QUEX(SECOND,find(strcmp('BDI',IDENTITY_QUEX_HDR))); FORSPSS_HDR{14}='BDI_2';
53
+ FORSPSS(sxi,15)=IDENTITY.QUEX(SECOND,find(strcmp('NSItot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{15}='NSI_2';
54
+ FORSPSS(sxi,16)=IDENTITY.QUEX(SECOND,find(strcmp('F_Tot_B4',IDENTITY_QUEX_HDR))); FORSPSS_HDR{16}='F_B4_2';
55
+ FORSPSS(sxi,17)=IDENTITY.QUEX(SECOND,find(strcmp('F_Tot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{17}='F_Tot_2';
56
+ FORSPSS(sxi,18)=IDENTITY.ERP(SECOND,1); FORSPSS_HDR{18}='P3b_2';
57
+ FORSPSS(sxi,19)=IDENTITY.ERP(SECOND,2); FORSPSS_HDR{19}='P3a_2';
58
+ else
59
+ FORSPSS(sxi,14)=NaN;
60
+ FORSPSS(sxi,15)=NaN;
61
+ FORSPSS(sxi,16)=NaN;
62
+ FORSPSS(sxi,17)=NaN;
63
+ FORSPSS(sxi,18)=NaN;
64
+ FORSPSS(sxi,19)=NaN;
65
+ end
66
+
67
+ if ~isempty(THIRD)
68
+ FORSPSS(sxi,20)=IDENTITY.QUEX(THIRD,find(strcmp('BDI',IDENTITY_QUEX_HDR))); FORSPSS_HDR{20}='BDI_3';
69
+ FORSPSS(sxi,21)=IDENTITY.QUEX(THIRD,find(strcmp('NSItot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{21}='NSI_3';
70
+ FORSPSS(sxi,22)=IDENTITY.QUEX(THIRD,find(strcmp('F_Tot_B4',IDENTITY_QUEX_HDR))); FORSPSS_HDR{22}='F_B4_3';
71
+ FORSPSS(sxi,23)=IDENTITY.QUEX(THIRD,find(strcmp('F_Tot',IDENTITY_QUEX_HDR))); FORSPSS_HDR{23}='F_Tot_3';
72
+ FORSPSS(sxi,24)=IDENTITY.ERP(THIRD,1); FORSPSS_HDR{24}='P3b_3';
73
+ FORSPSS(sxi,25)=IDENTITY.ERP(THIRD,2); FORSPSS_HDR{25}='P3a_3';
74
+ else
75
+ FORSPSS(sxi,20)=NaN;
76
+ FORSPSS(sxi,21)=NaN;
77
+ FORSPSS(sxi,22)=NaN;
78
+ FORSPSS(sxi,23)=NaN;
79
+ FORSPSS(sxi,24)=NaN;
80
+ FORSPSS(sxi,25)=NaN;
81
+ end
82
+
83
+
84
+
85
+ end
86
+
87
+
88
+ % For Mixed Linear Modeling
89
+ for sxi=1:length(IDENTITY.DEMO)
90
+
91
+ FORMLM(sxi,1)=IDENTITY.DEMO(sxi,1); FORMLM_HDR{1}='subno';
92
+ FORMLM(sxi,2)=IDENTITY.DEMO(sxi,find(strcmp('session',IDENTITY_DEMO_HDR))); FORMLM_HDR{2}='session';
93
+ FORMLM(sxi,3)=IDENTITY.DEMO(sxi,find(strcmp('group',IDENTITY_DEMO_HDR))); FORMLM_HDR{3}='group';
94
+ FORMLM(sxi,4)=IDENTITY.DEMO(sxi,find(strcmp('SexF1',IDENTITY_DEMO_HDR))); FORMLM_HDR{4}='SexF1';
95
+ FORMLM(sxi,5)=IDENTITY.DEMO(sxi,find(strcmp('Age',IDENTITY_DEMO_HDR))); FORMLM_HDR{5}='Age';
96
+ FORMLM(sxi,6)=IDENTITY.NP(sxi,find(strcmp('TOPF',IDENTITY_NP_HDR))); FORMLM_HDR{6}='TOPF';
97
+ FORMLM(sxi,7)=IDENTITY.TBI(sxi,find(strcmp('Days',IDENTITY_TBI_HDR))); FORMLM_HDR{7}='Days';
98
+
99
+ FORMLM(sxi,8)=IDENTITY.QUEX(sxi,find(strcmp('BDI',IDENTITY_QUEX_HDR))); FORMLM_HDR{8}='BDI';
100
+ FORMLM(sxi,9)=IDENTITY.QUEX(sxi,find(strcmp('NSItot',IDENTITY_QUEX_HDR))); FORMLM_HDR{9}='NSI';
101
+ FORMLM(sxi,10)=IDENTITY.QUEX(sxi,find(strcmp('F_Tot_B4',IDENTITY_QUEX_HDR))); FORMLM_HDR{10}='F_B4';
102
+ FORMLM(sxi,11)=IDENTITY.QUEX(sxi,find(strcmp('F_Tot',IDENTITY_QUEX_HDR))); FORMLM_HDR{11}='F_Tot';
103
+
104
+ FORMLM(sxi,12)=IDENTITY.ERP(sxi,1); FORMLM_HDR{12}='P3b';
105
+ FORMLM(sxi,13)=IDENTITY.ERP(sxi,2); FORMLM_HDR{13}='P3a';
106
+
107
+ end
108
+
109
+ FORMLM_HDR=FORMLM_HDR';
110
+ FORSPSS_HDR=FORSPSS_HDR';
111
+
scripts/s7_Mengs_z.m ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ % mengz_JFC(r1, r2, r12, n) compares two correlations r1 and r2:
2
+ % r1: correlation between X and Y
3
+ % r2: correlation between X and Z
4
+ % r12: correlation between Y and Z
5
+ % n: number of observations used to compute correlations
6
+
7
+ %% ---------------
8
+ clear Sx CONDI4Corr rhoXY rhoXZ rhoYZ n menghyp mengp mengzscore
9
+
10
+ Sx=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==3) );
11
+ CONDI4Corr=2;
12
+ MING_CHRONIC.P3b=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
13
+ CONDI4Corr=3;
14
+ MING_CHRONIC.P3a=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
15
+
16
+ rhoXY=-.46; % FrSBe & P3b
17
+ rhoXZ=.08; % FrSBe & P3a
18
+ rhoYZ=corr(MING_CHRONIC.P3b,MING_CHRONIC.P3a,'type','Spearman','rows','pairwise'); % P3a & PBb
19
+ n=sum(Sx);
20
+ [menghyp,mengp,mengzscore] = mengz_JFC(rhoXY,rhoXZ,rhoYZ,n)
21
+
22
+ %% ---------------
23
+ clear Sx CONDI4Corr rhoXY rhoXZ rhoYZ n menghyp mengp mengzscore
24
+
25
+ Sx=logical( double(IDENTITY.DEMO(:,2)==1) .* double(IDENTITY.DEMO(:,3)==2) );
26
+ CONDI4Corr=2;
27
+ MING_ACUTE.P3b=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
28
+ CONDI4Corr=3;
29
+ MING_ACUTE.P3a=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
30
+
31
+ rhoXY=-.11; % FrSBe & P3b
32
+ rhoXZ=-.44; % FrSBe & P3a
33
+ rhoYZ=corr(MING_ACUTE.P3b,MING_ACUTE.P3a,'type','Spearman','rows','pairwise'); % P3a & PBb
34
+ n=sum(Sx);
35
+ [menghyp,mengp,mengzscore] = mengz_JFC(rhoXY,rhoXZ,rhoYZ,n)
36
+
37
+ %% ---------------
38
+ clear Sx CONDI4Corr rhoXY rhoXZ rhoYZ n menghyp mengp mengzscore
39
+
40
+ Sx=logical( double(IDENTITY.DEMO(:,2)==2) .* double(IDENTITY.DEMO(:,3)==2) );
41
+ CONDI4Corr=2;
42
+ MING_ACUTE_S2.P3b=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
43
+ CONDI4Corr=3;
44
+ MING_ACUTE_S2.P3a=squeeze(mean(MEGA_ERP(Sx,ERPSITE(CONDI4Corr),ERPWINS_tx2disp(CONDI4Corr,1):ERPWINS_tx2disp(CONDI4Corr,2),CONDI4Corr),3));
45
+
46
+ rhoXY=-.11; % FrSBe & P3b
47
+ rhoXZ=-.49; % FrSBe & P3a
48
+ rhoYZ=corr(MING_ACUTE_S2.P3b,MING_ACUTE_S2.P3a,'type','Spearman','rows','pairwise'); % P3a & PBb
49
+ n=sum(Sx);
50
+ [menghyp,mengp,mengzscore] = mengz_JFC(rhoXY,rhoXZ,rhoYZ,n)
51
+
scripts/sx_Predict_Attrition.m ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ %%
2
+
3
+ acuteTBI=IDENTITY.DEMO(:,3)==2;
4
+ Sx_idxs=unique(IDENTITY.DEMO(acuteTBI,1));
5
+
6
+ AttritionPredictors=NaN(length(Sx_idxs),14);
7
+ for sxi=1:length(Sx_idxs)
8
+ thisguy=Sx_idxs(sxi);
9
+
10
+ FIRST=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==1) ));
11
+ SECOND=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==2) ));
12
+ THIRD=find(logical( double(IDENTITY.DEMO(:,1)==thisguy) .* double(IDENTITY.DEMO(:,2)==3) ));
13
+
14
+ if ~isempty(FIRST), Attrition(sxi,1)=1;
15
+ AttritionPredictors_HDR={'age';'sex';'TOPF';'Coding';'Span';'HVLT13';'HVLTDelay';'GCS';'LOCtime';'LOM';'Days';'BDI';'NSI';'FrSBe'};
16
+ AttritionPredictors(sxi,:)=[...
17
+ IDENTITY.DEMO(FIRST,6:7),...
18
+ IDENTITY.NP(FIRST,4:8),...
19
+ IDENTITY.TBI(FIRST,[4,6:8]),...
20
+ IDENTITY.QUEX(FIRST,[4,5,10]),...
21
+ ];
22
+ else Attrition(sxi,1)=0; end
23
+ if ~isempty(SECOND), Attrition(sxi,2)=1; else Attrition(sxi,2)=0; end
24
+ if ~isempty(THIRD), Attrition(sxi,3)=1; else Attrition(sxi,3)=0; end
25
+
26
+ clear thisguy FIRST SECOND THIRD;
27
+ end
28
+
29
+ % Clear NaNs
30
+ AttritionPredictors=AttritionPredictors(~isnan(AttritionPredictors(:,1)),:);
31
+ Attrition=Attrition(~isnan(AttritionPredictors(:,1)),:);
32
+
33
+
34
+ for atti=1:size(AttritionPredictors,2)
35
+ for sessi=1:3 % 1 doesn't really make sense, but added here to keep columns nice
36
+
37
+ A=AttritionPredictors(:,atti);
38
+ B=Attrition(:,sessi);
39
+
40
+ A2=A(~isnan(A));
41
+ B2=B(~isnan(A));
42
+
43
+ % B_acc is [constant, v1, v2,v1*v2] [validated by SPSS]
44
+ % STATS_acc.p is the p value for each
45
+ [B_acc,DEV_acc,STATS_acc] = glmfit(zscore(A2),B2, 'binomial','link','logit');
46
+ Predictors_Logistic{sessi}(atti,1)=B_acc(2);
47
+ Predictors_Logistic{sessi}(atti,2)=STATS_acc.p(2);
48
+
49
+ [~,P,~,STATS]=ttest2(A2(B2==1),A2(B2==0));
50
+ Predictors_t{sessi}(atti,1)=STATS.tstat;
51
+ Predictors_t{sessi}(atti,2)=P;
52
+
53
+ [P,~,U]=ranksum(A2(B2==1),A2(B2==0));
54
+ Predictors_u{sessi}(atti,1)=U.zval;
55
+ Predictors_u{sessi}(atti,2)=P;
56
+
57
+ clear B_acc DEV_acc STATS_acc A B A2 B2 STATS P U;
58
+ end
59
+ end
60
+
61
+ % % % S2:
62
+ % Logistic - 5
63
+ % t-test - 5
64
+ % U-test - 5
65
+
66
+ % % % S3:
67
+ % Logistic - 3,5 [almost 4]
68
+ % t-test - 3,5 [almost 4]
69
+ % U-test - 3,4,5
70
+
71
+ % 3=TOPF
72
+ % 4=Coding
73
+ % 5=Span
74
+
75
+ % Sess 2 dropouts:
76
+ nanmean(AttritionPredictors(Attrition(:,2)==0,3:5))
77
+ % Sess 2 stays:
78
+ nanmean(AttritionPredictors(Attrition(:,2)==1,3:5))
79
+
80
+ % Sess 3 dropouts:
81
+ nanmean(AttritionPredictors(Attrition(:,3)==0,3:5))
82
+ % Sess 3 stays:
83
+ nanmean(AttritionPredictors(Attrition(:,3)==1,3:5))
84
+
85
+ % So Lower Span predicts S2 dropout and Lower Span, Coding, and TOPF predict S3 dropout