-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathkappa.rmd
61 lines (40 loc) · 1.36 KB
/
kappa.rmd
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
---
title: "Calculate Kappa"
output: html_notebook
---
Kappa calculations: agreement statistics for pairwise classifier confusion matrices, following Titus et al. (1984).
```{r}
# Kappa agreement statistics from pairwise 2x2 confusion matrices,
# adapted from Titus et al. (1984).
library(caret)

# Compute kappa, its standard error, an approximate 95% CI, and a
# z statistic / p-value for H0: kappa = 0, from a 2x2 count table.
#
# counts: a 2x2 contingency table of classification counts.
# Returns a named list: kappa, se, ci (lower/upper), z, p_value.
kappa_stats <- function(counts) {
  cmat <- confusionMatrix(counts)
  k  <- cmat$overall[["Kappa"]]    # [[ ]] drops the name attribute
  pa <- cmat$overall[["Accuracy"]] # observed proportion of agreement
  # NOTE(review): the original script used Detection Prevalence as the
  # expected-agreement term Pe; the usual chance-agreement term for kappa
  # is the sum of products of the marginal proportions — confirm against
  # Titus et al. (1984) before publishing results.
  pe <- cmat$byClass[["Detection Prevalence"]]
  n  <- sum(counts)                # total events (whistles classified)
  # Standard error of kappa (Titus et al. 1984)
  se <- sqrt((pa * (1 - pa)) / (n * (1 - pe)^2))
  # Approximate SE under H0 (kappa = 0), used for the z test
  se0 <- sqrt(pe / (n * (1 - pe)))
  z <- k / se0
  list(
    kappa = k,
    se    = se,
    # 95% CI: kappa +/- 1.96 * SE. (The original used +/- 1 SE, which
    # is only a ~68% interval.)
    ci    = c(lower = k - 1.96 * se, upper = k + 1.96 * se),
    z     = z,
    # Two-sided p-value from the standard normal (replaces the broken
    # no-argument z.test() call, which errored at runtime).
    p_value = 2 * pnorm(-abs(z))
  )
}

# Build a 2x2 table skeleton; the random contents are placeholders and
# are overwritten below with saved results from OneDrive.
# https://stats.stackexchange.com/questions/124001
results.pm <- data.frame(
  pel = rep(LETTERS[1:2], each = 10),
  mhi = rep(sample(LETTERS[1:2], 20, replace = TRUE))
)
pm.results <- table(results.pm)

# Pel vs MHI counts (previously dead code: these were overwritten
# before being analyzed)
pm.results[1, 1] <- 8648
pm.results[1, 2] <- 6352
pm.results[2, 1] <- 5059
pm.results[2, 2] <- 9941
kappa_stats(pm.results)

# P vs N counts
pm.results[1, 1] <- 8509
pm.results[1, 2] <- 6491
pm.results[2, 1] <- 6619
pm.results[2, 2] <- 8381
kappa_stats(pm.results)
```