@Preamble{ " \newcommand{\noop}[1]{} " }
@article{brodrick_uncovering_2019,
title = {Uncovering {Ecological} {Patterns} with {Convolutional} {Neural} {Networks}},
volume = {34},
issn = {01695347},
url = {https://linkinghub.elsevier.com/retrieve/pii/S0169534719300862},
doi = {10.1016/j.tree.2019.03.006},
language = {en},
number = {8},
urldate = {2021-07-30},
journal = {Trends in Ecology \& Evolution},
author = {Brodrick, Philip G. and Davies, Andrew B. and Asner, Gregory P.},
month = aug,
year = {2019},
pages = {734--745},
file = {Brodrick et al. - 2019 - Uncovering Ecological Patterns with Convolutional .pdf:/Users/oliviergimenez/Zotero/storage/AVHP39E2/Brodrick et al. - 2019 - Uncovering Ecological Patterns with Convolutional .pdf:application/pdf},
}
@article{lamba_deep_2019,
title = {Deep learning for environmental conservation},
volume = {29},
issn = {09609822},
url = {https://linkinghub.elsevier.com/retrieve/pii/S0960982219310322},
doi = {10.1016/j.cub.2019.08.016},
language = {en},
number = {19},
urldate = {2021-07-30},
journal = {Current Biology},
author = {Lamba, Aakash and Cassey, Phillip and Segaran, Ramesh Raja and Koh, Lian Pin},
month = oct,
year = {2019},
pages = {R977--R982},
file = {Lamba et al. - 2019 - Deep learning for environmental conservation.pdf:/Users/oliviergimenez/Zotero/storage/TBZDWFQ7/Lamba et al. - 2019 - Deep learning for environmental conservation.pdf:application/pdf},
}
@article{botella2018,
title = {Species distribution modeling based on the automated identification of citizen observations},
volume = {6},
number = {2},
journal = {Applications in Plant Sciences},
author = {Botella, Christophe and Joly, Alexis and Bonnet, Pierre and Monestiez, Pascal and Munoz, François},
year = {2018},
pages = {e1029},
}
@article{weinstein_computer_2018,
title = {A computer vision for animal ecology},
volume = {87},
issn = {00218790},
url = {https://onlinelibrary.wiley.com/doi/10.1111/1365-2656.12780},
doi = {10.1111/1365-2656.12780},
language = {en},
number = {3},
urldate = {2021-07-30},
journal = {Journal of Animal Ecology},
author = {Weinstein, Ben G.},
editor = {Prugh, Laura},
month = may,
year = {2018},
pages = {533--545},
file = {Weinstein - 2018 - A computer vision for animal ecology.pdf:/Users/oliviergimenez/Zotero/storage/9TYSGDGD/Weinstein - 2018 - A computer vision for animal ecology.pdf:application/pdf},
}
@article{strubell2019energy,
title = {Energy and Policy Considerations for Deep Learning in {NLP}},
url = {http://arxiv.org/abs/1906.02243},
journal = {arXiv:1906.02243},
author = {Emma Strubell and Ananya Ganesh and Andrew McCallum},
year = {2019},
note = {arXiv: 1906.02243},
}
@article{beery_recognition_2018,
title = {Recognition in {Terra} {Incognita}},
url = {http://arxiv.org/abs/1807.04975},
journal = {arXiv:1807.04975},
author = {Beery, Sara and van Horn, Grant and Perona, Pietro},
year = {2018},
note = {arXiv: 1807.04975},
}
@article{willi_identifying_2019,
title = {Identifying animal species in camera trap images using deep learning and citizen science},
volume = {10},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13099},
doi = {10.1111/2041-210X.13099},
language = {en},
number = {1},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Willi, Marco and Pitman, Ross T. and Cardoso, Anabelle W. and Locke, Christina and Swanson, Alexandra and Boyer, Amy and Veldthuis, Marten and Fortson, Lucy},
editor = {Gaggiotti, Oscar},
month = jan,
year = {2019},
pages = {80--91},
file = {Willi et al. - 2019 - Identifying animal species in camera trap images u.pdf:/Users/oliviergimenez/Zotero/storage/TKAYQZB8/Willi et al. - 2019 - Identifying animal species in camera trap images u.pdf:application/pdf},
}
@article{tabak_machine_2019,
title = {Machine learning to classify animal species in camera trap images: {Applications} in ecology},
volume = {10},
issn = {2041-210X, 2041-210X},
shorttitle = {Machine learning to classify animal species in camera trap images},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13120},
doi = {10.1111/2041-210X.13120},
language = {en},
number = {4},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Tabak, Michael A. and Norouzzadeh, Mohammad S. and Wolfson, David W. and Sweeney, Steven J. and Vercauteren, Kurt C. and Snow, Nathan P. and Halseth, Joseph M. and Di Salvo, Paul A. and Lewis, Jesse S. and White, Michael D. and Teton, Ben and Beasley, James C. and Schlichting, Peter E. and Boughton, Raoul K. and Wight, Bethany and Newkirk, Eric S. and Ivan, Jacob S. and Odell, Eric A. and Brook, Ryan K. and Lukacs, Paul M. and Moeller, Anna K. and Mandeville, Elizabeth G. and Clune, Jeff and Miller, Ryan S.},
editor = {Photopoulou, Theoni},
month = apr,
year = {2019},
pages = {585--590},
file = {Tabak et al. - 2019 - Machine learning to classify animal species in cam.pdf:/Users/oliviergimenez/Zotero/storage/L8MFLYYL/Tabak et al. - 2019 - Machine learning to classify animal species in cam.pdf:application/pdf},
}
@article{schneider_past_2019,
title = {Past, present and future approaches using computer vision for animal re‐identification from camera trap data},
volume = {10},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13133},
doi = {10.1111/2041-210X.13133},
language = {en},
number = {4},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Schneider, Stefan and Taylor, Graham W. and Linquist, Stefan and Kremer, Stefan C.},
editor = {O'Hara, Robert B.},
month = apr,
year = {2019},
pages = {461--470},
file = {Schneider et al. - 2019 - Past, present and future approaches using computer.pdf:/Users/oliviergimenez/Zotero/storage/GXYIE9QU/Schneider et al. - 2019 - Past, present and future approaches using computer.pdf:application/pdf},
}
@article{christin_applications_2019,
title = {Applications for deep learning in ecology},
volume = {10},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13256},
doi = {10.1111/2041-210X.13256},
language = {en},
number = {10},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Christin, Sylvain and Hervet, Éric and Lecomte, Nicolas},
editor = {Ye, Hao},
month = oct,
year = {2019},
pages = {1632--1644},
file = {Christin et al. - 2019 - Applications for deep learning in ecology.pdf:/Users/oliviergimenez/Zotero/storage/MMVYQTQQ/Christin et al. - 2019 - Applications for deep learning in ecology.pdf:application/pdf},
}
@article{ferreira_deep_2020,
title = {Deep learning‐based methods for individual recognition in small birds},
volume = {11},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13436},
doi = {10.1111/2041-210X.13436},
language = {en},
number = {9},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Ferreira, André C. and Silva, Liliana R. and Renna, Francesco and Brandl, Hanja B. and Renoult, Julien P. and Farine, Damien R. and Covas, Rita and Doutrelant, Claire},
editor = {Codling, Edward},
month = sep,
year = {2020},
pages = {1072--1085},
file = {Ferreira et al. - 2020 - Deep learning‐based methods for individual recogni.pdf:/Users/oliviergimenez/Zotero/storage/WS5R69TC/Ferreira et al. - 2020 - Deep learning‐based methods for individual recogni.pdf:application/pdf},
}
@article{norouzzadeh_deep_2021,
title = {A deep active learning system for species identification and counting in camera trap images},
volume = {12},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13504},
doi = {10.1111/2041-210X.13504},
language = {en},
number = {1},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Norouzzadeh, Mohammad Sadegh and Morris, Dan and Beery, Sara and Joshi, Neel and Jojic, Nebojsa and Clune, Jeff},
editor = {Schofield, Matthew},
month = jan,
year = {2021},
pages = {150--161},
file = {Norouzzadeh et al. - 2021 - A deep active learning system for species identifi.pdf:/Users/oliviergimenez/Zotero/storage/WNXGEBUH/Norouzzadeh et al. - 2021 - A deep active learning system for species identifi.pdf:application/pdf},
}
@article{bogucki_applying_2019,
title = {Applying deep learning to right whale photo identification},
volume = {33},
issn = {0888-8892, 1523-1739},
url = {https://onlinelibrary.wiley.com/doi/10.1111/cobi.13226},
doi = {10.1111/cobi.13226},
abstract = {Photo identification is an important tool for estimating abundance and monitoring population trends over time. However, manually matching photographs to known individuals is time-consuming. Motivated by recent developments in image recognition, we hosted a data science challenge on the crowdsourcing platform Kaggle to automate the identification of endangered North Atlantic right whales (Eubalaena glacialis). The winning solution automatically identified individual whales with 87\% accuracy with a series of convolutional neural networks to identify the region of interest on an image, rotate, crop, and create standardized photographs of uniform size and orientation and then identify the correct individual whale from these passport-like photographs. Recent advances in deep learning coupled with this fully automated workflow have yielded impressive results and have the potential to revolutionize traditional methods for the collection of data on the abundance and distribution of wild populations. Presenting these results to a broad audience should further bridge the gap between the data science and conservation science communities.},
language = {en},
number = {3},
urldate = {2021-07-30},
journal = {Conservation Biology},
author = {Bogucki, Robert and Cygan, Marek and Khan, Christin Brangwynne and Klimek, Maciej and Milczek, Jan Kanty and Mucha, Marcin},
month = jun,
year = {2019},
pages = {676--684},
file = {Bogucki et al. - 2019 - Applying deep learning to right whale photo identi.pdf:/Users/oliviergimenez/Zotero/storage/KCYYFQVY/Bogucki et al. - 2019 - Applying deep learning to right whale photo identi.pdf:application/pdf},
}
@article{chen_study_2020,
title = {A study on giant panda recognition based on images of a large proportion of captive pandas},
volume = {10},
issn = {2045-7758, 2045-7758},
url = {https://onlinelibrary.wiley.com/doi/10.1002/ece3.6152},
doi = {10.1002/ece3.6152},
language = {en},
number = {7},
urldate = {2021-07-30},
journal = {Ecology and Evolution},
author = {Chen, Peng and Swarup, Pranjal and Matkowski, Wojciech Michal and Kong, Adams Wai Kin and Han, Su and Zhang, Zhihe and Rong, Hou},
month = apr,
year = {2020},
pages = {3561--3573},
file = {Chen et al. - 2020 - A study on giant panda recognition based on images.pdf:/Users/oliviergimenez/Zotero/storage/CZIY58XU/Chen et al. - 2020 - A study on giant panda recognition based on images.pdf:application/pdf},
}
@article{tabak_improving_2020,
title = {Improving the accessibility and transferability of machine learning algorithms for identification of animals in camera trap images: {MLWIC2}},
volume = {10},
issn = {2045-7758, 2045-7758},
shorttitle = {Improving the accessibility and transferability of machine learning algorithms for identification of animals in camera trap images},
url = {https://onlinelibrary.wiley.com/doi/10.1002/ece3.6692},
doi = {10.1002/ece3.6692},
language = {en},
number = {19},
urldate = {2021-07-30},
journal = {Ecology and Evolution},
author = {Tabak, Michael A. and Norouzzadeh, Mohammad S. and Wolfson, David W. and Newton, Erica J. and Boughton, Raoul K. and Ivan, Jacob S. and Odell, Eric A. and Newkirk, Eric S. and Conrey, Reesa Y. and Stenglein, Jennifer and Iannarilli, Fabiola and Erb, John and Brook, Ryan K. and Davis, Amy J. and Lewis, Jesse and Walsh, Daniel P. and Beasley, James C. and VerCauteren, Kurt C. and Clune, Jeff and Miller, Ryan S.},
month = oct,
year = {2020},
pages = {10374--10383},
file = {Tabak et al. - 2020 - Improving the accessibility and transferability of.pdf:/Users/oliviergimenez/Zotero/storage/JWLKCD74/Tabak et al. - 2020 - Improving the accessibility and transferability of.pdf:application/pdf},
}
@article{bolt_educating_2021,
title = {Educating the future generation of researchers: {A} cross-disciplinary survey of trends in analysis methods},
volume = {19},
issn = {1545-7885},
shorttitle = {Educating the future generation of researchers},
url = {https://dx.plos.org/10.1371/journal.pbio.3001313},
doi = {10.1371/journal.pbio.3001313},
abstract = {Methods for data analysis in the biomedical, life, and social (BLS) sciences are developing at a rapid pace. At the same time, there is increasing concern that education in quantitative methods is failing to adequately prepare students for contemporary research. These trends have led to calls for educational reform to undergraduate and graduate quantitative research method curricula. We argue that such reform should be based on data-driven insights into within- and cross-disciplinary use of analytic methods. Our survey of peer-reviewed literature analyzed approximately 1.3 million openly available research articles to monitor the cross-disciplinary mentions of analytic methods in the past decade. We applied data-driven text mining analyses to the “Methods” and “Results” sections of a large subset of this corpus to identify trends in analytic method mentions shared across disciplines, as well as those unique to each discipline. We found that the t test, analysis of variance (ANOVA), linear regression, chi-squared test, and other classical statistical methods have been and remain the most mentioned analytic methods in biomedical, life science, and social science research articles. However, mentions of these methods have declined as a percentage of the published literature between 2009 and 2020. On the other hand, multivariate statistical and machine learning approaches, such as artificial neural networks (ANNs), have seen a significant increase in the total share of scientific publications. We also found unique groupings of analytic methods associated with each BLS science discipline, such as the use of structural equation modeling (SEM) in psychology, survival models in oncology, and manifold learning in ecology. We discuss the implications of these findings for education in statistics and research methods, as well as within- and cross-disciplinary collaboration.},
language = {en},
number = {7},
urldate = {2021-07-30},
journal = {PLOS Biology},
author = {Bolt, Taylor and Nomi, Jason S. and Bzdok, Danilo and Uddin, Lucina Q.},
editor = {Dirnagl, Ulrich},
month = jul,
year = {2021},
pages = {e3001313},
file = {Bolt et al. - 2021 - Educating the future generation of researchers A .pdf:/Users/oliviergimenez/Zotero/storage/V8Q8RZ4H/Bolt et al. - 2021 - Educating the future generation of researchers A .pdf:application/pdf},
}
@article{miele_revisiting_2021,
title = {Revisiting animal photo‐identification using deep metric learning and network analysis},
volume = {12},
issn = {2041-210X, 2041-210X},
url = {https://onlinelibrary.wiley.com/doi/10.1111/2041-210X.13577},
doi = {10.1111/2041-210X.13577},
language = {en},
number = {5},
urldate = {2021-07-30},
journal = {Methods in Ecology and Evolution},
author = {Miele, Vincent and Dussert, Gaspard and Spataro, Bruno and Chamaillé‐Jammes, Simon and Allainé, Dominique and Bonenfant, Christophe},
editor = {Freckleton, Robert},
year = {2021},
pages = {863--873},
file = {Miele et al. - 2021 - Revisiting animal photo‐identification using deep .pdf:/Users/oliviergimenez/Zotero/storage/35S9AH3Z/Miele et al. - 2021 - Revisiting animal photo‐identification using deep .pdf:application/pdf},
}
@article{thompson_finfindr_2021,
title = {{finFindR}: {Automated} recognition and identification of marine mammal dorsal fins using residual convolutional neural networks},
issn = {0824-0469, 1748-7692},
shorttitle = {{finFindR}},
url = {https://onlinelibrary.wiley.com/doi/10.1111/mms.12849},
doi = {10.1111/mms.12849},
abstract = {Photographic identification is an essential research and management tool for marine mammal scientists. However, manual identification of individuals is time-consuming. To shorten processing times, we developed finFindR, an open-source application that uses a series of neural networks to autonomously locate dorsal fins in unedited field images, quantify an individual's unique fin characteristics, and match them to an existing photograph catalog. During a blind test comparing manual searching to finFindR for common bottlenose dolphin (Tursiops truncatus) photographs, experienced photo-identification technicians achieved similar match rates but examined an order of magnitude fewer photographs using finFindR (an average of 10 required with finFindR versus 124 with manual search). In those tests, the correct identity was ranked in the first position in 88\% of cases and was within the top 50 ranked positions in 97\% of cases. Our observations suggest that finFindR's matching capabilities are robust to moderate variation in image quality and fin distinctiveness. Importantly, finFindR allows users to build a catalog of known individuals through time and match an unlimited number of individuals instead of being restricted to a predefined set. finFindR's convolutional neural networks could be re-trained to identify members of many marine mammal species without altering finFindR's inherent structure.},
language = {en},
urldate = {2021-07-30},
journal = {Marine Mammal Science},
author = {Thompson, Jaime W. and Zero, Victoria H. and Schwacke, Lori H. and Speakman, Todd R. and Quigley, Brian M. and Morey, Jeanine S. and McDonald, Trent L.},
month = jul,
year = {2021},
pages = {mms.12849},
file = {Thompson et al. - 2021 - finFind.pdf:/Users/oliviergimenez/Zotero/storage/NI88FPG3/Thompson et al. - 2021 - finFind.pdf:application/pdf},
}
@article{lahoz-monfort_comprehensive_2021,
title = {A {Comprehensive} {Overview} of {Technologies} for {Species} and {Habitat} {Monitoring} and {Conservation}},
issn = {0006-3568, 1525-3244},
url = {https://academic.oup.com/bioscience/advance-article/doi/10.1093/biosci/biab073/6322306},
doi = {10.1093/biosci/biab073},
abstract = {The range of technologies currently used in biodiversity conservation is staggering, with innovative uses often adopted from other disciplines and being trialed in the field. We provide the first comprehensive overview of the current (2020) landscape of conservation technology, encompassing technologies for monitoring wildlife and habitats, as well as for on-the-ground conservation management (e.g., fighting illegal activities). We cover both established technologies (routinely deployed in conservation, backed by substantial field experience and scientific literature) and novel technologies or technology applications (typically at trial stage, only recently used in conservation), providing examples of conservation applications for both types. We describe technologies that deploy sensors that are fixed or portable, attached to vehicles (terrestrial, aquatic, or airborne) or to animals (biologging), complemented with a section on wildlife tracking. The last two sections cover actuators and computing (including web platforms, algorithms, and artificial intelligence).},
language = {en},
urldate = {2021-07-30},
journal = {BioScience},
author = {Lahoz-Monfort, José J and Magrath, Michael J L},
year = {2021},
file = {Lahoz-Monfort et Magrath - 2021 - A Comprehensive Overview of Technologies for Speci.pdf:/Users/oliviergimenez/Zotero/storage/GI79AVA2/Lahoz-Monfort et Magrath - 2021 - A Comprehensive Overview of Technologies for Speci.pdf:application/pdf},
}
@article{porto_alpaca_2020,
title = {{ALPACA}: a fast and accurate approach for automated landmarking of three-dimensional biological structures},
shorttitle = {{ALPACA}},
url = {http://biorxiv.org/lookup/doi/10.1101/2020.09.18.303891},
abstract = {Landmark-based geometric morphometrics has emerged as an essential discipline for the quantitative analysis of size and shape in ecology and evolution. With the ever-increasing density of digitized landmarks, the possible development of a fully automated method of landmark placement has attracted considerable attention. Despite the recent progress in image registration techniques, which could provide a pathway to automation, three-dimensional morphometric data is still mainly gathered by trained experts. For the most part, the large infrastructure requirements necessary to perform image-based registration, together with its system-specificity and its overall speed have prevented wide dissemination.},
language = {en},
urldate = {2021-07-31},
journal = {Methods in Ecology and Evolution},
author = {Porto, Arthur and Rolfe, Sara M. and Maga, A. Murat},
year = {2020},
doi = {10.1101/2020.09.18.303891},
}
@article{miele2021,
title = {{Images, {\'e}cologie et deep learning}},
author = {Miele, Vincent and Dray, St{\'e}phane and Gimenez, Olivier},
url = {https://hal.archives-ouvertes.fr/hal-03142486},
journal = {{Regards sur la biodiversit{\'e}}},
publisher = {{Soci{\'e}t{\'e} Fran{\c c}aise d'{\'E}cologie et d'{\'E}volution}},
year = {2021},
month = feb,
pdf = {https://hal.archives-ouvertes.fr/hal-03142486/file/Regard%20miele-al.pdf},
hal_id = {hal-03142486},
hal_version = {v1},
}
@article{waldchen2018,
author = {Wäldchen, Jana and Mäder, Patrick},
title = {Machine learning for image based species identification},
journal = {Methods in Ecology and Evolution},
volume = {9},
number = {11},
pages = {2216-2225},
year = {2018}
}
@Article{fiske2011,
title = {{unmarked}: An {R} Package for Fitting Hierarchical Models
of Wildlife Occurrence and Abundance},
author = {Ian Fiske and Richard Chandler},
journal = {Journal of Statistical Software},
year = {2011},
volume = {43},
number = {10},
pages = {1--23},
}
@article{rota2016,
author = {Rota, Christopher T. and Ferreira, Marco A. R. and Kays, Roland W. and Forrester, Tavis D. and Kalies, Elizabeth L. and McShea, William J. and Parsons, Arielle W. and Millspaugh, Joshua J.},
title = {A multispecies occupancy model for two or more interacting species},
journal = {Methods in Ecology and Evolution},
volume = {7},
number = {10},
pages = {1164-1173},
year = {2016}
}
@article{lecun_deep_2015,
title = {Deep learning},
volume = {521},
issn = {0028-0836, 1476-4687},
url = {http://www.nature.com/articles/nature14539},
doi = {10.1038/nature14539},
language = {en},
number = {7553},
urldate = {2021-08-04},
journal = {Nature},
author = {LeCun, Yann and Bengio, Yoshua and Hinton, Geoffrey},
month = may,
year = {2015},
pages = {436--444},
file = {LeCun et al. - 2015 - Deep learning.pdf:/Users/oliviergimenez/Zotero/storage/9HJK95BE/LeCun et al. - 2015 - Deep learning.pdf:application/pdf},
}
@article{baraniuk_science_2020,
title = {The science of deep learning},
volume = {117},
issn = {0027-8424, 1091-6490},
url = {http://www.pnas.org/lookup/doi/10.1073/pnas.2020596117},
doi = {10.1073/pnas.2020596117},
language = {en},
number = {48},
urldate = {2021-08-04},
journal = {Proceedings of the National Academy of Sciences},
author = {Baraniuk, Richard and Donoho, David and Gavish, Matan},
month = dec,
year = {2020},
pages = {30029--30032},
file = {Baraniuk et al. - 2020 - The science of deep learning.pdf:/Users/oliviergimenez/Zotero/storage/SDAPU4FW/Baraniuk et al. - 2020 - The science of deep learning.pdf:application/pdf},
}
@article{andina_deep_2018,
title = {Deep {Learning} for {Computer} {Vision}: {A} {Brief} {Review}},
volume = {2018},
issn = {1687-5265},
url = {https://doi.org/10.1155/2018/7068349},
doi = {10.1155/2018/7068349},
abstract = {Over the last years deep learning methods have been shown to outperform previous state-of-the-art machine learning techniques in several fields, with computer vision being one of the most prominent cases. This review paper provides a brief overview of some of the most significant deep learning schemes used in computer vision problems, that is, Convolutional Neural Networks, Deep Boltzmann Machines and Deep Belief Networks, and Stacked Denoising Autoencoders. A brief account of their history, structure, advantages, and limitations is given, followed by a description of their applications in various computer vision tasks, such as object detection, face recognition, action and activity recognition, and human pose estimation. Finally, a brief overview is given of future directions in designing deep learning schemes for computer vision problems and the challenges involved therein.},
journal = {Computational Intelligence and Neuroscience},
author = {Voulodimos, Athanasios and Doulamis, Nikolaos and Doulamis, Anastasios and Protopapadakis, Eftychios},
editor = {Andina, Diego},
month = feb,
year = {2018},
note = {Publisher: Hindawi},
pages = {7068349},
}
@incollection{NIPS2012_4824,
author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E.},
booktitle = {Advances in Neural Information Processing Systems 25},
editor = {Pereira, F. and Burges, C. J. C. and Bottou, L. and Weinberger, K. Q.},
pages = {1097--1105},
publisher = {Curran Associates, Inc.},
title = {ImageNet Classification with Deep Convolutional Neural Networks},
year = {2012},
}
@article{gimenez_statistical_2014,
title = {Statistical ecology comes of age},
volume = {10},
issn = {1744-9561, 1744-957X},
url = {https://royalsocietypublishing.org/doi/10.1098/rsbl.2014.0698},
doi = {10.1098/rsbl.2014.0698},
abstract = {The desire to predict the consequences of global environmental change has been the driver towards more realistic models embracing the variability and uncertainties inherent in ecology. Statistical ecology has gelled over the past decade as a discipline that moves away from describing patterns towards modelling the ecological processes that generate these patterns. Following the fourth International Statistical Ecology Conference (1–4 July 2014) in Montpellier, France, we analyse current trends in statistical ecology. Important advances in the analysis of individual movement, and in the modelling of population dynamics and species distributions, are made possible by the increasing use of hierarchical and hidden process models. Exciting research perspectives include the development of methods to interpret citizen science data and of efficient, flexible computational algorithms for model fitting. Statistical ecology has come of age: it now provides a general and mathematically rigorous framework linking ecological theory and empirical data.},
language = {en},
number = {12},
urldate = {2021-08-04},
journal = {Biology Letters},
author = {Gimenez, Olivier and Buckland, Stephen T. and Morgan, Byron J. T. and Bez, Nicolas and Bertrand, Sophie and Choquet, Rémi and Dray, Stéphane and Etienne, Marie-Pierre and Fewster, Rachel and Gosselin, Frédéric and Mérigot, Bastien and Monestiez, Pascal and Morales, Juan M. and Mortier, Frédéric and Munoz, François and Ovaskainen, Otso and Pavoine, Sandrine and Pradel, Roger and Schurr, Frank M. and Thomas, Len and Thuiller, Wilfried and Trenkel, Verena and de Valpine, Perry and Rexstad, Eric},
month = dec,
year = {2014},
pages = {20140698},
file = {Gimenez et al. - 2014 - Statistical ecology comes of age.pdf:/Users/oliviergimenez/Zotero/storage/SFQYKFZN/Gimenez et al. - 2014 - Statistical ecology comes of age.pdf:application/pdf},
}
@article{sutherland_identification_2013,
title = {Identification of 100 fundamental ecological questions},
volume = {101},
issn = {00220477},
url = {https://onlinelibrary.wiley.com/doi/10.1111/1365-2745.12025},
doi = {10.1111/1365-2745.12025},
language = {en},
number = {1},
urldate = {2021-08-04},
journal = {Journal of Ecology},
author = {Sutherland, William J. and Freckleton, Robert P. and Godfray, H. Charles J. and Beissinger, Steven R. and Benton, Tim and Cameron, Duncan D. and Carmel, Yohay and Coomes, David A. and Coulson, Tim and Emmerson, Mark C. and Hails, Rosemary S. and Hays, Graeme C. and Hodgson, Dave J. and Hutchings, Michael J. and Johnson, David and Jones, Julia P. G. and Keeling, Matt J. and Kokko, Hanna and Kunin, William E. and Lambin, Xavier and Lewis, Owen T. and Malhi, Yadvinder and Mieszkowska, Nova and Milner-Gulland, E. J. and Norris, Ken and Phillimore, Albert B. and Purves, Drew W. and Reid, Jane M. and Reuman, Daniel C. and Thompson, Ken and Travis, Justin M. J. and Turnbull, Lindsay A. and Wardle, David A. and Wiegand, Thorsten},
editor = {Gibson, David},
month = jan,
year = {2013},
pages = {58--67},
file = {Sutherland et al. - 2013 - Identification of 100 fundamental ecological quest.pdf:/Users/oliviergimenez/Zotero/storage/49J5J9TA/Sutherland et al. - 2013 - Identification of 100 fundamental ecological quest.pdf:application/pdf},
}
@article{wearn_responsible_2019,
title = {Responsible {AI} for conservation},
volume = {1},
issn = {2522-5839},
url = {http://www.nature.com/articles/s42256-019-0022-7},
doi = {10.1038/s42256-019-0022-7},
language = {en},
number = {2},
urldate = {2021-08-04},
journal = {Nature Machine Intelligence},
author = {Wearn, Oliver R. and Freeman, Robin and Jacoby, David M. P.},
month = feb,
year = {2019},
pages = {72--73},
file = {Wearn et al. - 2019 - Responsible AI for conservation.pdf:/Users/oliviergimenez/Zotero/storage/PVLPKMDH/Wearn et al. - 2019 - Responsible AI for conservation.pdf:application/pdf},
}
@article{lai_evaluating_2019,
title = {Evaluating the popularity of {R} in ecology},
volume = {10},
issn = {2150-8925, 2150-8925},
url = {https://onlinelibrary.wiley.com/doi/10.1002/ecs2.2567},
doi = {10.1002/ecs2.2567},
abstract = {The programming language R is widely used in many fields. We explored the extent of reported R use in the field of ecology using the Web of Science and text mining. We analyzed the frequencies of R packages reported in more than 60,000 peer-reviewed articles published in 30 ecology journals during a 10-yr period ending in 2017. The number of studies reported using R as their primary tool in data analysis increased linearly from 11.4\% in 2008 to 58.0\% in 2017. The top 10 packages reported were lme4, vegan, nlme, ape, MuMIn, MASS, mgcv, ade4, multcomp, and car. The increasing popularity of R has most likely furthered open science in ecological research because it can improve reproducibility of analyses and captures workflows when scripts and codes are included and shared. These findings may not be entirely unique to R because there are other programming languages used by ecologists, but they do strongly suggest that given the relatively high frequency of reported use of R, it is a significant component of contemporary analytics in the field of ecology.},
language = {en},
number = {1},
urldate = {2021-08-04},
journal = {Ecosphere},
author = {Lai, Jiangshan and Lortie, Christopher J. and Muenchen, Robert A. and Yang, Jian and Ma, Keping},
month = jan,
year = {2019},
file = {Lai et al. - 2019 - Evaluating the popularity of R in ecology.pdf:/Users/oliviergimenez/Zotero/storage/UXBJJVLJ/Lai et al. - 2019 - Evaluating the popularity of R in ecology.pdf:application/pdf},
}
@Manual{reticulate_ref,
author = {JJ Allaire and Kevin Ushey and Yuan Tang and Dirk
Eddelbuettel},
title = {reticulate: R Interface to Python},
year = {2017},
url = {https://github.com/rstudio/reticulate},
}
@article{breitenmoser_large_1998,
series = {Conservation {Biology} and {Biodiversity} {Strategies}},
title = {Large predators in the {Alps}: {The} fall and rise of man's competitors},
volume = {83},
issn = {0006-3207},
shorttitle = {Large predators in the {Alps}},
url = {https://www.sciencedirect.com/science/article/pii/S0006320797000840},
doi = {10.1016/S0006-3207(97)00084-0},
abstract = {The brown bear Ursus arctos, wolf Canis lupus, and Eurasian lynx Lynx lynx vanished during the 18th and 19th centuries from all regions of high human activity in Europe because of direct persecution and environmental changes. Bear, wolf, and lynx were vulnerable in different ways to deforestation and the destruction of wild ungulate populations. Analysing the ecological factors responsible for the fall of the large carnivores can help to prepare their recovery. The return of large predators into semi-natural areas such as the Alps is possible, as the forests have expanded, and the wild ungulate populations increased. Lynx reintroduction in the Alps started in the 1970s. Wolves returned to the south-western Alps from the central Italian population in the early 1990s. The brown bear is recolonising the Austrian Alps from Slovenia. However, the modern protective legislation is not backed by a cooperative attitude among the affected people. In rural areas, large carnivores are still regarded as unrestrained killers of wildlife and livestock. Ecological conditions and husbandry in the Alps have been altered substantially since the large carnivores were eradicated, and the potential for conflicts has diminished. But stockmen have lost any remaining tradition of coexistence with large predators, and sheep are again very abundant in the Swiss Alps. The return of the large predators will not be possible without changing the system of sheep-husbandry. The rural people are not yet willing to do so. They generally object to any change in their lifestyle induced from outside, and the large predators become a negative symbol for restrictive conservation measures considered to hinder economic development. Nature conservation, including the reintegration of large predators, must be integrated into rural development; local people must be much more involved in this process.},
language = {en},
number = {3},
urldate = {2021-08-05},
journal = {Biological Conservation},
author = {Breitenmoser, Urs},
month = mar,
year = {1998},
keywords = {Alps, conservation strategy, large predators, rural development, Switzerland},
pages = {279--289},
}
@article{vandel_distribution_2005,
title = {Distribution trend of the {Eurasian} lynx {Lynx} lynx populations in {France}},
volume = {69},
issn = {0025-1461, 1864-1547},
url = {https://www.degruyter.com/document/doi/10.1515/mamm.2005.013/html},
doi = {10.1515/mamm.2005.013},
abstract = {The changes in the distribution of the lynx populations in France were analysed with 3,760 data gathered between 1974 and 2002. Maps were drawn for successive three-year periods on a fixed 3 km × 3 km grid. Lynx were assumed “present” in every 3 × 3 km grid with one or several data, as well as in the eight surrounding grids, i.e. in a total area of 81 km2. In 2000--2002, the total and permanent lynx range covered, respectively, 8,622 km2 and 5,823 km2 in the French Jura mountains (11,500 km2 and at least 6,000 km2 for the whole French and Swiss Jura Massif). Almost all forested mountain massifs are already occupied by lynx in that region. The total and permanent lynx range covered 3,159 km2 and 1,962 km2 in the Vosges Massif. This small range and still precarious status of the population 20 years after the first lynx re-introductions, should be related to the high mortality rate of the released lynx and low number of founders. Exchanges between the Jura and the Vosges lynx populations could however be expected in the near future. The total lynx range covered 4,365 km2 in the Alps but no permanent area of presence was noticed. The regular increase in the number of islets of presence and the progressive colonization from the north towards the extreme south of the Alps suggested an insufficient observation pressure rather than the absence of any established population in the French Alps. In the future, the status of these three reintroduced populations should probably be comforted by exchanges between the Jura and the Vosges mountains and the Jura and the northern Alps.},
language = {en},
number = {2},
urldate = {2021-08-05},
journal = {Mammalia},
author = {Vandel, Jean-Michel and Stahl, Philippe},
month = jan,
year = {2005},
file = {Vandel et Stahl - 2005 - Distribution trend of the Eurasian lynx Lynx lynx .pdf:/Users/oliviergimenez/Zotero/storage/XEEDNEDN/Vandel et Stahl - 2005 - Distribution trend of the Eurasian lynx Lynx lynx .pdf:application/pdf},
}
@article{molinari-jobin_variation_2007,
title = {Variation in diet, prey selectivity and home-range size of {Eurasian} lynx {Lynx} lynx in {Switzerland}},
volume = {13},
issn = {0909-6396, 1903-220X},
url = {https://bioone.org/journals/wildlife-biology/volume-13/issue-4/0909-6396_2007_13_393_VIDPSA_2.0.CO_2/Variation-in-diet-prey-selectivity-and-home-range-size-of/10.2981/0909-6396(2007)13[393:VIDPSA]2.0.CO;2.full},
doi = {10.2981/0909-6396(2007)13[393:VIDPSA]2.0.CO;2},
abstract = {To analyse the factors responsible for the interplay of Eurasian lynx Lynx lynx predation and home-range size, we reviewed patterns of lynx predation in Switzerland by comparing the prey spectrum of lynx in five studies performed in the following study areas: the northwestern Alps, where lynx were studied both in the 1980s and 1990s, the central Alps, the Jura Mountains, and northeastern Switzerland. We then compared home-range size of female lynx with two indirect measures of prey abundance, roe deer Capreolus capreolus and chamois Rupicapra rupicapra harvested per km2 and habitat suitability for roe deer and chamois as derived from a GIS model. Lynx diets were similar among sites. Roe deer and chamois made up 90\% of prey items in all five studies. Comparing the proportion of roe deer and chamois in the diet with availability, Manly's preference indices indicated selective predation in all studies. Roe deer were preferred over chamois in all areas except in the Jura Mountains where relatively few chamois were present. Predation was least selective in northeastern Switzerland, where the initial phase of recolonisation by lynx was studied. Variation in prey availability is often identified as an important factor explaining intraspecific variation in home-range size. Due to differences in roe deer and chamois abundance from one study area to another, we expected female lynx home ranges to decrease with increasing prey abundance. The predictors for Minimum Convex Polygon (MCP) and Kernel home-range estimators differed. MCP home-range sizes were best explained by the interactions of study with the number of locations per lynx, roe deer harvested per km2, and good roe deer habitat, whereas Kernel home-range sizes were best explained by the interactions of study with good roe deer habitat, good chamois habitat, and the interaction of good roe deer and chamois habitat plus an additive effect of the study. Contrary to our expectations, there was no simple correlation of prime roe deer and chamois habitat nor between the number of roe deer and chamois harvested per km2 and the size of female lynx home ranges. The comparison of the five studies suggested that this expectation may only be valid if lynx populations are close to carrying capacity (e.g. the Jura Mountains and the northwestern Alps in the 1990s). For predictions of home-range size both habitat (spatial factor) and the status and dynamic of the predator/prey populations (temporal factor) need to be taken into account.},
number = {4},
urldate = {2021-08-05},
journal = {Wildlife Biology},
author = {Molinari-Jobin, Anja and Zimmermann, Fridolin and Ryser, Andreas and Breitenmoser-Würsten, Christine and Capt, Simon and Breitenmoser, Urs and Molinari, Paolo and Haller, Heinrich and Eyholzer, Roman},
month = dec,
year = {2007},
note = {Publisher: Nordic Board for Wildlife Research},
pages = {393--405},
file = {Full Text PDF:/Users/oliviergimenez/Zotero/storage/CQNFJCMH/Molinari-Jobin et al. - 2007 - Variation in diet, prey selectivity and home-range.pdf:application/pdf},
}
@article{gimenez_spatial_2019,
title = {Spatial density estimates of {Eurasian} lynx ({Lynx} lynx) in the {French} {Jura} and {Vosges} {Mountains}},
volume = {9},
issn = {2045-7758},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/ece3.5668},
doi = {10.1002/ece3.5668},
abstract = {Obtaining estimates of animal population density is a key step in providing sound conservation and management strategies for wildlife. For many large carnivores however, estimating density is difficult because these species are elusive and wide-ranging. Here, we focus on providing the first density estimates of the Eurasian lynx (Lynx lynx) in the French Jura and Vosges mountains. We sampled a total of 413 camera trapping sites (with two cameras per site) between January 2011 and April 2016 in seven study areas across seven counties of the French Jura and Vosges mountains. We obtained 592 lynx detections over 19,035 trap days in the Jura mountains and 0 detection over 6,804 trap days in the Vosges mountains. Based on coat patterns, we identified a total number of 92 unique individuals from photographs, including 16 females, 13 males, and 63 individuals of unknown sex. Using spatial capture–recapture (SCR) models, we estimated abundance in the study areas between 5 (SE = 0.1) and 29 (0.2) lynx and density between 0.24 (SE = 0.02) and 0.91 (SE = 0.03) lynx per 100 km2. We also provide a comparison with nonspatial density estimates and discuss the observed discrepancies. Our study is yet another example of the advantage of combining SCR methods and noninvasive sampling techniques to estimate density for elusive and wide-ranging species, like large carnivores. While the estimated densities in the French Jura mountains are comparable to other lynx populations in Europe, the fact that we detected no lynx in the Vosges mountains is alarming. Connectivity should be encouraged between the French Jura mountains, the Vosges mountains, and the Palatinate Forest in Germany where a reintroduction program is currently ongoing. Our density estimates will help in setting a baseline conservation status for the lynx population in France.},
language = {en},
number = {20},
urldate = {2021-08-05},
journal = {Ecology and Evolution},
author = {Gimenez, Olivier and Gatti, Sylvain and Duchamp, Christophe and Germain, Estelle and Laurent, Alain and Zimmermann, Fridolin and Marboutin, Eric},
year = {2019},
note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1002/ece3.5668},
keywords = {camera trapping, large carnivores, noninvasive sampling, photo identification, spatially explicit capture–recapture models},
pages = {11707--11715},
file = {Full Text PDF:/Users/oliviergimenez/Zotero/storage/5IFVXMJ6/Gimenez et al. - 2019 - Spatial density estimates of Eurasian lynx (Lynx l.pdf:application/pdf},
}
@article{zimmermann_optimizing_2013,
title = {Optimizing the size of the area surveyed for monitoring a {Eurasian} lynx ({Lynx} lynx) population in the {Swiss} {Alps} by means of photographic capture-recapture},
volume = {8},
issn = {1749-4877},
doi = {10.1111/1749-4877.12017},
abstract = {We studied the influence of surveyed area size on density estimates by means of camera-trapping in a low-density felid population (1-2 individuals/100 km2). We applied non-spatial capture-recapture (CR) and spatial CR (SCR) models for Eurasian lynx during winter 2005/2006 in the northwestern Swiss Alps by sampling an area divided into 5 nested plots ranging from 65 to 760 km2. CR model density estimates (95\% CI) for models M0 and Mh decreased from 2.61 (1.55-3.68) and 3.6 (1.62-5.57) independent lynx/100 km2, respectively, in the smallest to 1.20 (1.04-1.35) and 1.26 (0.89-1.63) independent lynx/100 km2, respectively, in the largest area surveyed. SCR model density estimates also decreased with increasing sampling area but not significantly. High individual range overlaps in relatively small areas (the edge effect) is the most plausible reason for this positive bias in the CR models. Our results confirm that SCR models are much more robust to changes in trap array size than CR models, thus avoiding overestimation of density in smaller areas. However, when a study is concerned with monitoring population changes, large spatial efforts (area surveyed ≥760 km2) are required to obtain reliable and precise density estimates with these population densities and recapture rates.},
language = {eng},
number = {3},
journal = {Integrative Zoology},
author = {Zimmermann, Fridolin and Breitenmoser-Würsten, Christine and Molinari-Jobin, Anja and Breitenmoser, Urs},
month = sep,
year = {2013},
pmid = {24020463},
keywords = {Animal Distribution, Animal Identification Systems, Animals, area surveyed, density estimation, Lynx, Lynx lynx, Models, Theoretical, photographic capture-recapture, Photography, Population Density, Sample Size, Swiss Alps, Switzerland},
pages = {232--243},
}
@article{Rota2016,
author = {Rota, Christopher T. and Ferreira, Marco A. R. and Kays, Roland W. and Forrester, Tavis D. and Kalies, Elizabeth L. and McShea, William J. and Parsons, Arielle W. and Millspaugh, Joshua J.},
title = {A multispecies occupancy model for two or more interacting species},
journal = {Methods in Ecology and Evolution},
volume = {7},
number = {10},
pages = {1164-1173},
keywords = {community, competition, eMammal, interspecific interactions, multinomial logit, multinomial probit, multivariate Bernoulli, occupancy modelling, predation},
doi = {https://doi.org/10.1111/2041-210X.12587},
url = {https://besjournals.onlinelibrary.wiley.com/doi/abs/10.1111/2041-210X.12587},
eprint = {https://besjournals.onlinelibrary.wiley.com/doi/pdf/10.1111/2041-210X.12587},
abstract = {Summary Species occurrence is influenced by environmental conditions and the presence of other species. Current approaches for multispecies occupancy modelling are practically limited to two interacting species and often require the assumption of asymmetric interactions. We propose a multispecies occupancy model that can accommodate two or more interacting species. We generalize the single-species occupancy model to two or more interacting species by assuming the latent occupancy state is a multivariate Bernoulli random variable. We propose modelling the probability of each potential latent occupancy state with both a multinomial logit and a multinomial probit model and present details of a Gibbs sampler for the latter. As an example, we model co-occurrence probabilities of bobcat (Lynx rufus), coyote (Canis latrans), grey fox (Urocyon cinereoargenteus) and red fox (Vulpes vulpes) as a function of human disturbance variables throughout 6 Mid-Atlantic states in the eastern United States. We found evidence for pairwise interactions among most species, and the probability of some pairs of species occupying the same site varied along environmental gradients; for example, occupancy probabilities of coyote and grey fox were independent at sites with little human disturbance, but these two species were more likely to occur together at sites with high human disturbance. Ecological communities are composed of multiple interacting species. Our proposed method improves our ability to draw inference from such communities by permitting modelling of detection/non-detection data from an arbitrary number of species, without assuming asymmetric interactions. Additionally, our proposed method permits modelling the probability two or more species occur together as a function of environmental variables. These advancements represent an important improvement in our ability to draw community-level inference from multiple interacting species that are subject to imperfect detection.},
year = {2016}
}
@article{dai_multivariate_2013,
title = {Multivariate {Bernoulli} distribution},
volume = {19},
issn = {1350-7265},
url = {https://projecteuclid.org/journals/bernoulli/volume-19/issue-4/Multivariate-Bernoulli-distribution/10.3150/12-BEJSP10.full},
doi = {10.3150/12-BEJSP10},
language = {en},
number = {4},
urldate = {2021-08-06},
journal = {Bernoulli},
author = {Dai, Bin and Ding, Shilin and Wahba, Grace},
month = sep,
year = {2013},
file = {Dai et al. - 2013 - Multivariate Bernoulli distribution.pdf:/Users/oliviergimenez/Zotero/storage/KR3H3R5R/Dai et al. - 2013 - Multivariate Bernoulli distribution.pdf:application/pdf},
}
@Article{unmarkedFiske,
title = {{unmarked}: An {R} Package for Fitting Hierarchical Models
of Wildlife Occurrence and Abundance},
author = {Ian Fiske and Richard Chandler},
journal = {Journal of Statistical Software},
year = {2011},
volume = {43},
number = {10},
pages = {1--23},
url = {https://www.jstatsoft.org/v43/i10/},
}
@article{shao_transfer_2015,
title = {Transfer {Learning} for {Visual} {Categorization}: {A} {Survey}},
volume = {26},
issn = {2162-2388},
shorttitle = {Transfer {Learning} for {Visual} {Categorization}},
doi = {10.1109/TNNLS.2014.2330900},
abstract = {Regular machine learning and data mining techniques study the training data for future inferences under a major assumption that the future data are within the same feature space or have the same distribution as the training data. However, due to the limited availability of human labeled training data, training data that stay in the same feature space or have the same distribution as the future data cannot be guaranteed to be sufficient enough to avoid the over-fitting problem. In real-world applications, apart from data in the target domain, related data in a different domain can also be included to expand the availability of our prior knowledge about the target future data. Transfer learning addresses such cross-domain learning problems by extracting useful information from data in a related domain and transferring them for being used in target tasks. In recent years, with transfer learning being applied to visual categorization, some typical problems, e.g., view divergence in action recognition tasks and concept drifting in image classification tasks, can be efficiently solved. In this paper, we survey state-of-the-art transfer learning algorithms in visual categorization applications, such as object recognition, image classification, and human action recognition.},
number = {5},
journal = {IEEE Transactions on Neural Networks and Learning Systems},
author = {Shao, Ling and Zhu, Fan and Li, Xuelong},
month = may,
year = {2015},
note = {Conference Name: IEEE Transactions on Neural Networks and Learning Systems},
keywords = {Action recognition, Adaptation models, image classification, Knowledge transfer, Learning systems, machine learning, object recognition, survey, Testing, Training, Training data, transfer learning, visual categorization, visual categorization., Visualization},
pages = {1019--1034},
}
@article{shorten_survey_2019,
title = {A survey on {Image} {Data} {Augmentation} for {Deep} {Learning}},
volume = {6},
issn = {2196-1115},
url = {https://doi.org/10.1186/s40537-019-0197-0},
doi = {10.1186/s40537-019-0197-0},
abstract = {Deep convolutional neural networks have performed remarkably well on many Computer Vision tasks. However, these networks are heavily reliant on big data to avoid overfitting. Overfitting refers to the phenomenon when a network learns a function with very high variance such as to perfectly model the training data. Unfortunately, many application domains do not have access to big data, such as medical image analysis. This survey focuses on Data Augmentation, a data-space solution to the problem of limited data. Data Augmentation encompasses a suite of techniques that enhance the size and quality of training datasets such that better Deep Learning models can be built using them. The image augmentation algorithms discussed in this survey include geometric transformations, color space augmentations, kernel filters, mixing images, random erasing, feature space augmentation, adversarial training, generative adversarial networks, neural style transfer, and meta-learning. The application of augmentation methods based on GANs are heavily covered in this survey. In addition to augmentation techniques, this paper will briefly discuss other characteristics of Data Augmentation such as test-time augmentation, resolution impact, final dataset size, and curriculum learning. This survey will present existing methods for Data Augmentation, promising developments, and meta-level decisions for implementing Data Augmentation. Readers will understand how Data Augmentation can improve the performance of their models and expand limited datasets to take advantage of the capabilities of big data.},
number = {1},
urldate = {2021-08-08},
journal = {Journal of Big Data},
author = {Shorten, Connor and Khoshgoftaar, Taghi M.},
month = jul,
year = {2019},
keywords = {Big data, Data Augmentation, Deep Learning, GANs, Image data},
pages = {60},
file = {Full Text PDF:/Users/oliviergimenez/Zotero/storage/2EYJ9C5T/Shorten et Khoshgoftaar - 2019 - A survey on Image Data Augmentation for Deep Learn.pdf:application/pdf},
}
@inproceedings{he_deep_2016,
title = {Deep {Residual} {Learning} for {Image} {Recognition}},
doi = {10.1109/CVPR.2016.90},
abstract = {Deeper neural networks are more difficult to train. We present a residual learning framework to ease the training of networks that are substantially deeper than those used previously. We explicitly reformulate the layers as learning residual functions with reference to the layer inputs, instead of learning unreferenced functions. We provide comprehensive empirical evidence showing that these residual networks are easier to optimize, and can gain accuracy from considerably increased depth. On the ImageNet dataset we evaluate residual nets with a depth of up to 152 layers - 8× deeper than VGG nets [40] but still having lower complexity. An ensemble of these residual nets achieves 3.57\% error on the ImageNet test set. This result won the 1st place on the ILSVRC 2015 classification task. We also present analysis on CIFAR-10 with 100 and 1000 layers. The depth of representations is of central importance for many visual recognition tasks. Solely due to our extremely deep representations, we obtain a 28\% relative improvement on the COCO object detection dataset. Deep residual nets are foundations of our submissions to ILSVRC \& COCO 2015 competitions1, where we also won the 1st places on the tasks of ImageNet detection, ImageNet localization, COCO detection, and COCO segmentation.},
booktitle = {2016 {IEEE} {Conference} on {Computer} {Vision} and {Pattern} {Recognition} ({CVPR})},
author = {He, Kaiming and Zhang, Xiangyu and Ren, Shaoqing and Sun, Jian},
month = jun,
year = {2016},
note = {ISSN: 1063-6919},
keywords = {Complexity theory, Degradation, Image recognition, Image segmentation, Neural networks, Training, Visualization},
pages = {770--778},
}
@incollection{NEURIPS2019_9015,
title = {PyTorch: An Imperative Style, High-Performance Deep Learning Library},
author = {Paszke, Adam and Gross, Sam and Massa, Francisco and Lerer, Adam and Bradbury, James and Chanan, Gregory and Killeen, Trevor and Lin, Zeming and Gimelshein, Natalia and Antiga, Luca and Desmaison, Alban and Kopf, Andreas and Yang, Edward and DeVito, Zachary and Raison, Martin and Tejani, Alykhan and Chilamkurthy, Sasank and Steiner, Benoit and Fang, Lu and Bai, Junjie and Chintala, Soumith},
booktitle = {Advances in Neural Information Processing Systems 32},
editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett},
pages = {8024--8035},
year = {2019},
publisher = {Curran Associates, Inc.},
url = {http://papers.neurips.cc/paper/9015-pytorch-an-imperative-style-high-performance-deep-learning-library.pdf}
}
@inproceedings{Yosinski2014,
author = {Yosinski, Jason and Clune, Jeff and Bengio, Yoshua and Lipson, Hod},
title = {How Transferable Are Features in Deep Neural Networks?},
year = {2014},
publisher = {MIT Press},
address = {Cambridge, MA, USA},
abstract = {Many deep neural networks trained on natural images exhibit a curious phenomenon in common: on the first layer they learn features similar to Gabor filters and color blobs. Such first-layer features appear not to be specific to a particular dataset or task, but general in that they are applicable to many datasets and tasks. Features must eventually transition from general to specific by the last layer of the network, but this transition has not been studied extensively. In this paper we experimentally quantify the generality versus specificity of neurons in each layer of a deep convolutional neural network and report a few surprising results. Transferability is negatively affected by two distinct issues: (1) the specialization of higher layer neurons to their original task at the expense of performance on the target task, which was expected, and (2) optimization difficulties related to splitting networks between co-adapted neurons, which was not expected. In an example network trained on ImageNet, we demonstrate that either of these two issues may dominate, depending on whether features are transferred from the bottom, middle, or top of the network. We also document that the transferability of features decreases as the distance between the base task and target task increases, but that transferring features even from distant tasks can be better than using random features. A final surprising result is that initializing a network with transferred features from almost any number of layers can produce a boost to generalization that lingers even after fine-tuning to the target dataset.},
booktitle = {Proceedings of the 27th International Conference on Neural Information Processing Systems - Volume 2},
pages = {3320--3328},
numpages = {9},
location = {Montreal, Canada},
series = {NIPS'14}
}
@article{Duggan2021,
author = {Duggan, Matthew T. and Groleau, Melissa F. and Shealy, Ethan P. and Self, Lillian S. and Utter, Taylor E. and Waller, Matthew M. and Hall, Bryan C. and Stone, Chris G. and Anderson, Layne L. and Mousseau, Timothy A.},
title = {An approach to rapid processing of camera trap images with minimal human input},
journal = {Ecology and Evolution},
year = {2021},
keywords = {camera trap, deep learning, neural network, transfer learning, wildlife ecology},
doi = {10.1002/ece3.7970},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/ece3.7970},
eprint = {https://onlinelibrary.wiley.com/doi/pdf/10.1002/ece3.7970},
abstract = {Camera traps have become an extensively utilized tool in ecological research, but the manual processing of images created by a network of camera traps rapidly becomes an overwhelming task, even for small camera trap studies. We used transfer learning to create convolutional neural network (CNN) models for identification and classification. By utilizing a small dataset with an average of 275 labeled images per species class, the model was able to distinguish between species and remove false triggers. We trained the model to detect 17 object classes with individual species identification, reaching an accuracy up to 92\% and an average F1 score of 85\%. Previous studies have suggested the need for thousands of images of each object class to reach results comparable to those achieved by human observers; however, we show that such accuracy can be achieved with fewer images. With transfer learning and an ongoing camera trap study, a deep learning model can be successfully created by a small camera trap study. A generalizable model produced from an unbalanced class set can be utilized to extract trap events that can later be confirmed by human processors.}
}
@article{clipp2021,
author = {Clipp, Hannah L. and Evans, Amber L. and Kessinger, Brin E. and Kellner, K. and Rota, Christopher T.},
title = {A penalized likelihood for multi-species occupancy models improves predictions of species interactions},
journal = {Ecology},
year = {2021}
}
@article{miller2011,
author = {Miller, David A. and Nichols, James D. and McClintock, Brett T. and Grant, Evan H. Campbell and Bailey, Larissa L. and Weir, Linda A.},
title = {Improving occupancy estimation when two types of observational error occur: non-detection and species misidentification},
journal = {Ecology},
volume = {92},
number = {7},
pages = {1422--1428},
keywords = {anuran censuses, call surveys, false positive detection, Lithobates spp., misclassification, misidentification, multiple states, presence–absence, proportion area occupied, site occupancy, species occurrence},
doi = {10.1890/10-1396.1},
url = {https://esajournals.onlinelibrary.wiley.com/doi/abs/10.1890/10-1396.1},
eprint = {https://esajournals.onlinelibrary.wiley.com/doi/pdf/10.1890/10-1396.1},
abstract = {Efforts to draw inferences about species occurrence frequently account for false negatives, the common situation when individuals of a species are not detected even when a site is occupied. However, recent studies suggest the need to also deal with false positives, which occur when species are misidentified so that a species is recorded as detected when a site is unoccupied. Bias in estimators of occupancy, colonization, and extinction can be severe when false positives occur. Accordingly, we propose models that simultaneously account for both types of error. Our approach can be used to improve estimates of occupancy for study designs where a subset of detections is of a type or method for which false positives can be assumed to not occur. We illustrate properties of the estimators with simulations and data for three species of frogs. We show that models that account for possible misidentification have greater support (lower AIC for two species) and can yield substantially different occupancy estimates than those that do not. When the potential for misidentification exists, researchers should consider analytical techniques that can account for this source of error, such as those presented here.},
year = {2011}
}
@article{chambert2018,
author = {Chambert, Thierry and Grant, Evan H. Campbell and Miller, David A. W. and Nichols, James D. and Mulder, Kevin P. and Brand, Adrianne B.},
title = {Two-species occupancy modelling accounting for species misidentification and non-detection},
journal = {Methods in Ecology and Evolution},
volume = {9},
number = {6},
pages = {1468--1477},
keywords = {false positive, observation error, Plethodon cinereus, Plethodon shenandoah, site colonization, site extinction, species distribution modelling, species misidentification},
doi = {10.1111/2041-210X.12985},
url = {https://besjournals.onlinelibrary.wiley.com/doi/abs/10.1111/2041-210X.12985},
eprint = {https://besjournals.onlinelibrary.wiley.com/doi/pdf/10.1111/2041-210X.12985},
abstract = {In occupancy studies, species misidentification can lead to false-positive detections, which can cause severe estimator biases. Currently, all models that account for false-positive errors only consider omnibus sources of false detections and are limited to single-species occupancy. However, false detections for a given species often occur because of the misidentification with another, closely related species. To exploit this explicit source of false-positive detection error, we develop a two-species occupancy model that accounts for misidentifications between two species of interest. As with other false-positive models, identifiability is greatly improved by the availability of unambiguous detections at a subset of site x occasions. Here, we consider the case where some of the field observations can be confirmed using laboratory or other independent identification methods (“confirmatory data”). We performed three simulation studies to (1) assess the model's performance under various realistic scenarios, (2) investigate the influence of the proportion of confirmatory data on estimator accuracy and (3) compare the performance of this two-species model with that of the single-species false-positive model. The model shows good performance under all scenarios, even when only small proportions of detections are confirmed (e.g. 5\%). It also clearly outperforms the single-species model. We illustrate application of this model using a 4-year dataset on two sympatric species of lungless salamanders: the US federally endangered Shenandoah salamander Plethodon shenandoah, and its presumed competitor, the red-backed salamander Plethodon cinereus. Occupancy of red-backed salamanders appeared very stable across the 4 years of study, whereas the Shenandoah salamander displayed substantial turnover in occupancy of forest habitats among years. Given the extent of species misidentification issues in occupancy studies, this modelling approach should help improve the reliability of estimates of species distribution, which is the goal of many studies and monitoring programmes. Further developments, to account for different forms of state uncertainty, can be readily undertaken under our general approach.},
year = {2018}
}