references.bib
@inproceedings{holdgraf_evidence_2014,
address = {Brisbane, Australia},
title = {Evidence for {Predictive} {Coding} in {Human} {Auditory} {Cortex}},
booktitle = {International {Conference} on {Cognitive} {Neuroscience}},
publisher = {Frontiers in Neuroscience},
author = {Holdgraf, Christopher Ramsay and de Heer, Wendy and Pasley, Brian N. and Knight, Robert T.},
year = {2014}
}
@article{holdgraf_rapid_2016,
title = {Rapid tuning shifts in human auditory cortex enhance speech intelligibility},
volume = {7},
issn = {2041-1723},
url = {http://www.nature.com/doifinder/10.1038/ncomms13654},
doi = {10.1038/ncomms13654},
journal = {Nature Communications},
author = {Holdgraf, Christopher Ramsay and de Heer, Wendy and Pasley, Brian N. and Rieger, Jochem W. and Crone, Nathan and Lin, Jack J. and Knight, Robert T. and Theunissen, Frédéric E.},
year = {2016},
pages = {13654},
}
@inproceedings{holdgraf_portable_2017,
title = {Portable learning environments for hands-on computational instruction using container- and cloud-based technology to teach data science},
volume = {Part F1287},
isbn = {978-1-4503-5272-7},
doi = {10.1145/3093338.3093370},
abstract = {© 2017 ACM. There is an increasing interest in learning outside of the traditional classroom setting. This is especially true for topics covering computational tools and data science, as both are challenging to incorporate in the standard curriculum. These atypical learning environments offer new opportunities for teaching, particularly when it comes to combining conceptual knowledge with hands-on experience/expertise with methods and skills. Advances in cloud computing and containerized environments provide an attractive opportunity to improve the efficiency and ease with which students can learn. This manuscript details recent advances towards using commonly-available cloud computing services and advanced cyberinfrastructure support for improving the learning experience in bootcamp-style events. We cover the benefits (and challenges) of using a server hosted remotely instead of relying on student laptops, discuss the technology that was used in order to make this possible, and give suggestions for how others could implement and improve upon this model for pedagogy and reproducibility.},
booktitle = {{ACM} {International} {Conference} {Proceeding} {Series}},
author = {Holdgraf, Christopher Ramsay and Culich, A. and Rokem, A. and Deniz, F. and Alegro, M. and Ushizima, D.},
year = {2017},
keywords = {Teaching, Bootcamps, Cloud computing, Data science, Docker, Pedagogy}
}
@article{holdgraf_encoding_2017,
title = {Encoding and decoding models in cognitive electrophysiology},
volume = {11},
issn = {16625137},
doi = {10.3389/fnsys.2017.00061},
abstract = {© 2017 Holdgraf, Rieger, Micheli, Martin, Knight and Theunissen. Cognitive neuroscience has seen rapid growth in the size and complexity of data recorded from the human brain as well as in the computational tools available to analyze this data. This data explosion has resulted in an increased use of multivariate, model-based methods for asking neuroscience questions, allowing scientists to investigate multiple hypotheses with a single dataset, to use complex, time-varying stimuli, and to study the human brain under more naturalistic conditions. These tools come in the form of “Encoding” models, in which stimulus features are used to model brain activity, and “Decoding” models, in which neural features are used to generate a stimulus output. Here we review the current state of encoding and decoding models in cognitive electrophysiology and provide a practical guide toward conducting experiments and analyses in this emerging field. Our examples focus on using linear models in the study of human language and audition. We show how to calculate auditory receptive fields from natural sounds as well as how to decode neural recordings to predict speech. The paper aims to be a useful tutorial to these approaches, and a practical introduction to using machine learning and applied statistics to build models of neural activity. The data analytic approaches we discuss may also be applied to other sensory modalities, motor systems, and cognitive systems, and we cover some examples in these areas. In addition, a collection of Jupyter notebooks is publicly available as a complement to the material covered in this paper, providing code examples and tutorials for predictive modeling in Python. The aim is to provide a practical understanding of predictive modeling of human brain data and to propose best practices in conducting these analyses.},
journal = {Frontiers in Systems Neuroscience},
author = {Holdgraf, Christopher Ramsay and Rieger, J.W. and Micheli, C. and Martin, S. and Knight, R.T. and Theunissen, F.E.},
year = {2017},
keywords = {Decoding models, Encoding models, Electrocorticography (ECoG), Electrophysiology/evoked potentials, Machine learning applied to neuroscience, Natural stimuli, Predictive modeling, Tutorials}
}
@book{ruby,
title = {The Ruby Programming Language},
author = {Flanagan, David and Matsumoto, Yukihiro},
year = {2008},
publisher = {O'Reilly Media}
}
% L96 Jupyter Book
@inproceedings{Lorenz1995,
title = {Predictability: a problem partly solved},
booktitle = {Seminar on Predictability},
volume = {1},
year = {1995},
pages = {1--18},
organization = {ECMWF},
address = {Shinfield Park, Reading},
url = {https://www.ecmwf.int/node/10829},
author = {Lorenz, E.N.}
}
@article{Wilks2005,
doi = {10.1256/qj.04.03},
url = {https://doi.org/10.1256/qj.04.03},
year = {2005},
publisher = {Wiley},
volume = {131},
number = {606},
pages = {389--407},
author = {Daniel S. Wilks},
title = {Effects of stochastic parametrizations in the Lorenz {\textquotesingle}96 system},
journal = {Quarterly Journal of the Royal Meteorological Society}
}
@article{Arnold2013,
doi = {10.1098/rsta.2011.0479},
url = {https://doi.org/10.1098/rsta.2011.0479},
year = {2013},
publisher = {The Royal Society},
volume = {371},
number = {1991},
pages = {20110479},
author = {H. M. Arnold and I. M. Moroz and T. N. Palmer},
title = {Stochastic parametrizations and model uncertainty in the Lorenz '96 system},
journal = {Philosophical Transactions of the Royal Society A: Mathematical, Physical and Engineering Sciences}
}
@article{Brajard2021,
doi = {10.1098/rsta.2020.0086},
url = {https://doi.org/10.1098/rsta.2020.0086},
year = {2021},
month = feb,
publisher = {The Royal Society},
volume = {379},
number = {2194},
pages = {20200086},
author = {Julien Brajard and Alberto Carrassi and Marc Bocquet and Laurent Bertino},
title = {Combining data assimilation and machine learning to infer unresolved scale parametrization},
journal = {Philosophical Transactions of the Royal Society A: Mathematical, Physical and Engineering Sciences}
}
@article{Schneider2017,
doi = {10.1002/2017gl076101},
url = {https://doi.org/10.1002/2017gl076101},
year = {2017},
month = dec,
publisher = {American Geophysical Union ({AGU})},
volume = {44},
number = {24},
author = {Tapio Schneider and Shiwei Lan and Andrew Stuart and Jo{\~{a}}o Teixeira},
title = {Earth System Modeling 2.0: A Blueprint for Models That Learn From Observations and Targeted High-Resolution Simulations},
journal = {Geophysical Research Letters}
}
@article{Russell2017,
doi = {10.1016/j.cpc.2017.08.011},
url = {https://doi.org/10.1016/j.cpc.2017.08.011},
year = {2017},
month = dec,
publisher = {Elsevier {BV}},
volume = {221},
pages = {160--173},
author = {Francis P. Russell and Peter D. D\"{u}ben and Xinyu Niu and Wayne Luk and T.N. Palmer},
title = {Exploiting the chaotic behaviour of atmospheric models with reconfigurable architectures},
journal = {Computer Physics Communications}
}