Skip to content

Commit 4d419bf

Browse files
committed
added dina independent
1 parent ed2cfb8 commit 4d419bf

File tree

7 files changed

+1736
-0
lines changed

7 files changed

+1736
-0
lines changed

.DS_Store

6 KB
Binary file not shown.

education/.DS_Store

6 KB
Binary file not shown.

education/bibliography.bib

+38
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,41 @@
1+
@article{Junker2001,
2+
year={2001},
3+
journal={Applied Psychological Measurement},
4+
title={Cognitive assessment models with few assumptions, and connections with nonparametric item response theory},
5+
author={Junker, Brian and Sijtsma, Klaas},
6+
pages={258--272},
7+
volume = {25}
8+
}
9+
@book{tatsuoka1984,
10+
title={Analysis of errors in fraction addition and subtraction problems},
11+
author={Tatsuoka, Kikumi K},
12+
year={1984},
13+
publisher={Computer-based Education Research Laboratory, University of Illinois}
14+
}
15+
@article{de2009dina,
16+
title={DINA model and parameter estimation: A didactic},
17+
author={{de la Torre}, Jimmy},
18+
journal={Journal of Educational and Behavioral Statistics},
19+
volume={34},
20+
number={1},
21+
pages={115--130},
22+
year={2009}
23+
}
24+
@book{rupp:2010,
25+
Address = {New York},
26+
Author = {Rupp, A. A. and Templin, J. L. and Henson, R. A.},
27+
Publisher = {Guilford},
28+
Title = {Diagnostic assessment: Theory, methods, and applications},
29+
Year = 2010
30+
}
31+
@article{delatorre2004,
32+
year={2004},
33+
journal={Psychometrika},
34+
title={Higher-order latent trait models for cognitive diagnosis},
35+
author={Jimmy {de la Torre} and Jeffrey A. Douglas},
36+
pages={333--353},
37+
volume = {69}
38+
}
139
@article{gelman2008weakly,
240
title={A weakly informative default prior distribution for logistic and other regression models},
341
author={Gelman, Andrew and Jakulin, Aleks and Pittau, Maria Grazia and Su, Yu-Sung},

education/dina_independent/dina_independent.Rmd

+468
Large diffs are not rendered by default.

education/dina_independent/dina_independent.html

+1,087
Large diffs are not rendered by default.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
data {
  int<lower=1> I;      // number of items
  int<lower=1> J;      // number of respondents
  int<lower=1> K;      // number of attributes
  int<lower=1> C;      // number of attribute profiles (latent classes); presumably C = 2^K — confirm with caller
  matrix[J, I] y;      // binary response matrix (respondent by item)
  matrix[C, K] alpha;  // attribute profile matrix (0/1 mastery pattern per class)
  matrix[I, C] xi;     // global mastery indicator: 1 iff class c masters all attributes item i requires
}

parameters {
  row_vector<lower=0, upper=1>[K] eta;  // marginal mastery probability of each attribute
  real<lower=0, upper=1> slip[I];       // slip parameter per item
  real<lower=0, upper=1> guess[I];      // guess parameter per item
}

transformed parameters {
  // Class membership probabilities implied by independent attribute masteries:
  // nu[c] = prod_k eta[k]^alpha[c,k] * (1 - eta[k])^(1 - alpha[c,k]).
  // Sums to 1 over the C profiles when alpha enumerates all 2^K patterns.
  simplex[C] nu;
  vector[C] log_nu;
  for (c in 1:C) {
    nu[c] = 1;
    for (k in 1:K) {
      nu[c] = nu[c] * eta[k]^alpha[c, k] * (1 - eta[k])^(1 - alpha[c, k]);
    }
  }
  log_nu = log(nu);
}

model {
  // Item response probabilities depend only on (item, class), not on the
  // respondent, so compute them once instead of once per respondent.
  matrix[I, C] log_prob;     // log P(y = 1 | item i, class c)
  matrix[I, C] log_1m_prob;  // log P(y = 0 | item i, class c)
  real ps[C];                // per-class log joint density for one respondent

  slip ~ beta(5, 25);
  guess ~ beta(5, 25);

  for (c in 1:C) {
    for (i in 1:I) {
      // DINA: masters of all required attributes answer correctly unless they
      // slip; everyone else succeeds only by guessing.
      real p_correct = (1 - slip[i])^xi[i, c] * guess[i]^(1 - xi[i, c]);
      log_prob[i, c] = log(p_correct);
      log_1m_prob[i, c] = log1m(p_correct);  // log1m for numerical stability
    }
  }

  // Marginalize the latent class for each respondent.
  for (j in 1:J) {
    for (c in 1:C) {
      real acc = log_nu[c];
      for (i in 1:I) {
        acc = acc + y[j, i] * log_prob[i, c] + (1 - y[j, i]) * log_1m_prob[i, c];
      }
      ps[c] = acc;
    }
    target += log_sum_exp(ps);
  }
}

generated quantities {
  matrix[J, C] prob_resp_class;  // posterior P(respondent j is in latent class c)
  matrix[J, K] prob_resp_attr;   // posterior P(respondent j masters attribute k)
  {
    // Local block: the temporaries below are scratch space and are not
    // written to the posterior output.
    matrix[I, C] log_prob;
    matrix[I, C] log_1m_prob;
    row_vector[C] prob_joint;
    real prob_attr_class[C];

    for (c in 1:C) {
      for (i in 1:I) {
        real p_correct = (1 - slip[i])^xi[i, c] * guess[i]^(1 - xi[i, c]);
        log_prob[i, c] = log(p_correct);
        log_1m_prob[i, c] = log1m(p_correct);
      }
    }

    for (j in 1:J) {
      for (c in 1:C) {
        real acc = 0;
        for (i in 1:I) {
          acc = acc + y[j, i] * log_prob[i, c] + (1 - y[j, i]) * log_1m_prob[i, c];
        }
        prob_joint[c] = nu[c] * exp(acc);
      }
      prob_resp_class[j] = prob_joint / sum(prob_joint);
    }

    // P(master of attribute k) = sum of class probabilities over classes
    // whose profile includes attribute k.
    for (j in 1:J) {
      for (k in 1:K) {
        for (c in 1:C) {
          prob_attr_class[c] = prob_resp_class[j, c] * alpha[c, k];
        }
        prob_resp_attr[j, k] = sum(prob_attr_class);
      }
    }
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
data {
  int<lower=1> I;      // number of items
  int<lower=1> J;      // number of respondents
  int<lower=1> K;      // number of attributes
  int<lower=1> C;      // number of attribute profiles (latent classes)
  matrix[J, I] y;      // binary response matrix (respondent by item)
  matrix[C, K] alpha;  // attribute profile matrix (0/1 mastery pattern per class)
  matrix[I, C] xi;     // global mastery indicator: 1 iff class c masters all attributes item i requires
}

parameters {
  simplex[C] nu;                   // latent class membership probabilities (unstructured)
  real<lower=0, upper=1> slip[I];  // slip parameter per item
  real<lower=0, upper=1> guess[I]; // guess parameter per item
}

transformed parameters {
  vector[C] log_nu;  // cached log class probabilities for the mixture
  log_nu = log(nu);
}

model {
  // Item response probabilities depend only on (item, class), not on the
  // respondent, so compute them once instead of once per respondent.
  matrix[I, C] log_prob;     // log P(y = 1 | item i, class c)
  matrix[I, C] log_1m_prob;  // log P(y = 0 | item i, class c)
  real ps[C];                // per-class log joint density for one respondent

  slip ~ beta(5, 25);
  guess ~ beta(5, 25);

  for (c in 1:C) {
    for (i in 1:I) {
      // DINA: masters of all required attributes answer correctly unless they
      // slip; everyone else succeeds only by guessing.
      real p_correct = (1 - slip[i])^xi[i, c] * guess[i]^(1 - xi[i, c]);
      log_prob[i, c] = log(p_correct);
      log_1m_prob[i, c] = log1m(p_correct);  // log1m for numerical stability
    }
  }

  // Marginalize the latent class for each respondent.
  for (j in 1:J) {
    for (c in 1:C) {
      real acc = log_nu[c];
      for (i in 1:I) {
        acc = acc + y[j, i] * log_prob[i, c] + (1 - y[j, i]) * log_1m_prob[i, c];
      }
      ps[c] = acc;
    }
    target += log_sum_exp(ps);
  }
}

generated quantities {
  matrix[J, C] prob_resp_class;  // posterior P(respondent j is in latent class c)
  matrix[J, K] prob_resp_attr;   // posterior P(respondent j masters attribute k)
  {
    // Local block: the temporaries below are scratch space and are not
    // written to the posterior output.
    matrix[I, C] log_prob;
    matrix[I, C] log_1m_prob;
    row_vector[C] prob_joint;
    real prob_attr_class[C];

    for (c in 1:C) {
      for (i in 1:I) {
        real p_correct = (1 - slip[i])^xi[i, c] * guess[i]^(1 - xi[i, c]);
        log_prob[i, c] = log(p_correct);
        log_1m_prob[i, c] = log1m(p_correct);
      }
    }

    for (j in 1:J) {
      for (c in 1:C) {
        real acc = 0;
        for (i in 1:I) {
          acc = acc + y[j, i] * log_prob[i, c] + (1 - y[j, i]) * log_1m_prob[i, c];
        }
        prob_joint[c] = nu[c] * exp(acc);
      }
      prob_resp_class[j] = prob_joint / sum(prob_joint);
    }

    // P(master of attribute k) = sum of class probabilities over classes
    // whose profile includes attribute k.
    for (j in 1:J) {
      for (k in 1:K) {
        for (c in 1:C) {
          prob_attr_class[c] = prob_resp_class[j, c] * alpha[c, k];
        }
        prob_resp_attr[j, k] = sum(prob_attr_class);
      }
    }
  }
}

0 commit comments

Comments
 (0)