Commit cb02efc: hmcdm 2.1.0
1 parent: 80bd8e5


45 files changed (+2664, -3299 lines)

.DS_Store

2 KB
Binary file not shown.

DESCRIPTION

Lines changed: 1 addition & 1 deletion

@@ -1,7 +1,7 @@
 Package: hmcdm
 Type: Package
 Title: Hidden Markov Cognitive Diagnosis Models for Learning
-Version: 2.1.0.9000
+Version: 2.1.0
 Authors@R: c(
     person("Susu", "Zhang", email = "szhan105@illinois.edu", role = c("aut")),
     person("Shiyu", "Wang", email = "swang44@uga.edu", role = c("aut")),

NAMESPACE

Lines changed: 2 additions & 8 deletions

@@ -6,21 +6,15 @@ S3method(print,summary.hmcdm)
 S3method(summary,hmcdm)
 export(ETAmat)
 export(OddsRatio)
-export(Q_list)
 export(Q_list_g)
 export(TPmat)
 export(hmcdm)
 export(inv_bijectionvector)
 export(rOmega)
 export(random_Q)
-export(simDINA)
-export(simNIDA)
 export(sim_RT)
-export(simrRUM)
-export(simulate_alphas_FOHM)
-export(simulate_alphas_HO_joint)
-export(simulate_alphas_HO_sep)
-export(simulate_alphas_indept)
+export(sim_alphas)
+export(sim_hmcdm)
 import(bayesplot)
 import(rstantools)
 importFrom(Rcpp,evalCpp)
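This hunk removes `Q_list` and the model-specific simulators (`simDINA`, `simNIDA`, `simrRUM`, and the `simulate_alphas_*` family) in favor of the consolidated `sim_alphas` and `sim_hmcdm` exports. A minimal sketch of confirming the new export surface after upgrading; it only inspects the namespace and makes no assumptions about the new functions' arguments:

    library(hmcdm)

    exports <- getNamespaceExports("hmcdm")

    # Consolidated simulation interface added in 2.1.0
    stopifnot(all(c("sim_alphas", "sim_hmcdm") %in% exports))

    # Exports removed in 2.1.0
    removed <- c("Q_list", "simDINA", "simNIDA", "simrRUM",
                 "simulate_alphas_FOHM", "simulate_alphas_HO_joint",
                 "simulate_alphas_HO_sep", "simulate_alphas_indept")
    stopifnot(!any(removed %in% exports))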

NEWS.md

Lines changed: 4 additions & 0 deletions

@@ -1,3 +1,7 @@
+# hmcdm 2.1.0
+
+- Release of `hmcdm` package version 2.1.0.
+
 # hmcdm 2.0.0
 
 - Release of `hmcdm` package version 2.0.0.

R/RcppExports.R

Lines changed: 209 additions & 435 deletions
Large diffs are not rendered by default.

R/Spatial_rotation_data.R

Lines changed: 14 additions & 0 deletions

@@ -54,3 +54,17 @@
 #' @author Shiyu Wang, Yan Yang, Jeff Douglas, and Steve Culpepper
 'Test_order'
 
+
+#' Design array
+#'
+#' `Design_array` contains item administration information at all time points in the Spatial
+#' Rotation Learning Program.
+#' @format An array of dimension N-by-J-by-L, containing each subject's item administration.
+#' @details The data object `"Design_array"` contains an array of dimension N-by-J-by-L
+#' indicating the items assigned (1/0) to each subject at each time point.
+#' @source Spatial Rotation Learning Experiment at UIUC between Fall 2015 and Spring 2016.
+#' @author Shiyu Wang, Yan Yang, Jeff Douglas, and Steve Culpepper
+"Design_array"
+
+
+
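For orientation on the documented N-by-J-by-L layout, here is a toy, hypothetical design array (not the shipped `Design_array` dataset). The 1/NA coding of administered versus non-administered items below is an assumption, inferred from the `which(!is.na(Design_array[i,,t]))` lookups in the updated `pp_check.R` further down:

    # Toy dimensions: 4 subjects, 6 items, 3 time points (made up for illustration)
    N <- 4; J <- 6; L <- 3
    design <- array(NA, dim = c(N, J, L))  # NA = item not administered (assumption)
    design[, 1:2, 1] <- 1                  # items 1-2 administered at time point 1
    design[, 3:4, 2] <- 1                  # items 3-4 administered at time point 2
    design[, 5:6, 3] <- 1                  # items 5-6 administered at time point 3
    which(!is.na(design[1, , 2]))          # items subject 1 saw at time 2: 3 4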

R/pp_check.R

Lines changed: 24 additions & 29 deletions

@@ -20,7 +20,7 @@
 #' Zhang, S., Douglas, J. A., Wang, S. & Culpepper, S. A. (2019) <doi:10.1007/978-3-030-05584-4_24>
 #' @examples
 #' \donttest{
-#' output_FOHM = hmcdm(Y_real_array,Q_matrix,"DINA_FOHM",Test_order,Test_versions,10000,5000)
+#' output_FOHM = hmcdm(Y_real_array,Q_matrix,"DINA_FOHM",Design_array,1000,500)
 #' library(bayesplot)
 #' pp_check(output_FOHM)
 #' pp_check(output_FOHM, plotfun="hist", type="item_mean")
@@ -29,50 +29,47 @@
 pp_check.hmcdm <- function(object,plotfun="dens_overlay",type="total_score"){
   N <- dim(object$input_data$Response)[1]
   L <- dim(object$input_data$Response)[3]
-  Jt <- dim(object$input_data$Response)[2]
-  J <- Jt*L
+  J <- dim(object$input_data$Response)[2]
 
-  Y_sim <- Dense2Sparse(object$input_data$Response, object$input_data$Test_order, object$input_data$Test_versions)
-  Y_sim_array <- Sparse2Dense(Y_sim, object$input_data$Test_order, object$input_data$Test_versions)
-  Test_order <- object$input_data$Test_order
-  Test_versions <- object$input_data$Test_versions
-  Q_matrix <- Array2Mat(object$input_data$Qs)
+  Y_sim <- object$input_data$Response
+  Design_array <- object$input_data$Design_array
+  Q_matrix <- object$input_data$Q_matrix
   Q_examinee <- object$input_data$Q_examinee
+
   Y_sim_collapsed <- matrix(NA,N,J)
   for(i in 1:N){
-    test_i <- object$input_data$Test_versions[i]
     for(t in 1:L){
-      t_i = object$input_data$Test_order[test_i,t]
-      Y_sim_collapsed[i,(Jt*(t_i-1)+1):(Jt*t_i)] <- Y_sim_array[i,,t]
+      block_it <- which(!is.na(Design_array[i,,t]))
+      Y_sim_collapsed[i,block_it] <- Y_sim[i,block_it,t]
     }
   }
 
   if(object$Model == "DINA_HO"){
-    object_fit <- Learning_fit(object, "DINA_HO", Y_sim,Q_matrix,
-                               Test_order, Test_versions, Q_examinee)
+    object_fit <- Learning_fit_g(object, "DINA_HO", Y_sim,Q_matrix,
+                                 Design_array, Q_examinee)
   }
   if(object$Model == "DINA_HO_RT_sep" | object$Model == "DINA_HO_RT_joint"){
-    L_sim <- Dense2Sparse(object$input_data$Latency, object$input_data$Test_order, object$input_data$Test_versions)
-    object_fit <- Learning_fit(object,object$Model,Y_sim,Q_matrix,
-                               object$input_data$Test_order,object$input_data$Test_versions,
+    L_sim <- object$input_data$Latency
+    object_fit <- Learning_fit_g(object,object$Model,Y_sim,Q_matrix,
+                                 Design_array,
                                  Q_examinee=object$input_data$Q_examinee,
-                               Latency_array = object$input_data$Latency, G_version = object$input_data$G_version)
+                                 Latency_array = L_sim, G_version = object$input_data$G_version)
   }
   if(object$Model == "rRUM_indept" | object$Model == "NIDA_indept"){
-    object_fit <- Learning_fit(object,object$Model,Y_sim,Q_matrix,
-                               object$input_data$Test_order,object$input_data$Test_versions,
+    object_fit <- Learning_fit_g(object,object$Model,Y_sim,Q_matrix,
+                                 Design_array,
                                  R=object$input_data$R)
   }
   if(object$Model == "DINA_FOHM"){
-    object_fit <- Learning_fit(object,"DINA_FOHM",Y_sim,Q_matrix,
-                               object$input_data$Test_order,object$input_data$Test_versions)
+    object_fit <- Learning_fit_g(object,"DINA_FOHM",Y_sim,Q_matrix,
+                                 Design_array)
   }
 
   if(type=="total_score"){
     ## total score
     total_score_obs <- matrix(NA, N, L)
     for(t in 1:L){
-      total_score_obs[,t] <- rowSums(object$input_data$Response[,,t])
+      total_score_obs[,t] <- rowSums(object$input_data$Response[,,t], na.rm=TRUE)
     }
     obs <- rowSums(total_score_obs)
     pp <- apply(object_fit$PPs$total_score_PP,1,colSums)
@@ -81,7 +78,7 @@ pp_check.hmcdm <- function(object,plotfun="dens_overlay",type="total_score"){
     ## Item means
     obs <- rep(NA, J)
     for(j in 1:J){
-      obs[j] <- mean(Y_sim_collapsed[,j])
+      obs[j] <- mean(Y_sim_collapsed[,j], na.rm=TRUE)
     }
     pp <- t(object_fit$PPs$item_mean_PP)
   }
@@ -98,27 +95,25 @@ pp_check.hmcdm <- function(object,plotfun="dens_overlay",type="total_score"){
     pp <- log(ORs_pp)
   }
   if(type=="latency_mean"){
-    L_sim_array <- Sparse2Dense(L_sim, object$input_data$Test_order, object$input_data$Test_versions)
     L_sim_collapsed <- matrix(NA,N,J)
     for(i in 1:N){
-      test_i <- object$input_data$Test_versions[i]
       for(t in 1:L){
-        t_i = object$input_data$Test_order[test_i,t]
-        L_sim_collapsed[i,(Jt*(t_i-1)+1):(Jt*t_i)] <- L_sim_array[i,,t]
+        block_it <- which(!is.na(Design_array[i,,t]))
+        L_sim_collapsed[i,block_it] <- L_sim[i,block_it,t]
      }
    }
     ## latency means
     obs <- rep(NA, J)
     for(j in 1:J){
-      obs[j] <- mean(L_sim_collapsed[,j])
+      obs[j] <- mean(L_sim_collapsed[,j], na.rm=TRUE)
     }
     pp <- t(object_fit$PPs$RT_mean_PP)
   }
   if(type=="total_latency"){
     ## total score
     total_latency_obs <- matrix(NA, N, L)
     for(t in 1:L){
-      total_latency_obs[,t] <- rowSums(object$input_data$Latency[,,t])
+      total_latency_obs[,t] <- rowSums(object$input_data$Latency[,,t], na.rm=TRUE)
     }
     obs <- rowSums(total_latency_obs)
     pp <- apply(object_fit$PPs$total_time_PP,1,colSums)
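The core change in this file is that responses and latencies are now collapsed across time points by looking up each subject's administered items in `Design_array`, instead of reconstructing block positions from `Test_order` and `Test_versions`. A self-contained toy sketch of that indexing step, using made-up dimensions and data rather than the package's own objects:

    # Toy data: 3 subjects, 4 items, 2 time points (illustrative only)
    N <- 3; J <- 4; L <- 2
    set.seed(1)
    Response <- array(NA, dim = c(N, J, L))
    Design_array <- array(NA, dim = c(N, J, L))
    Design_array[, 1:2, 1] <- 1                  # items 1-2 at time 1
    Design_array[, 3:4, 2] <- 1                  # items 3-4 at time 2
    Response[, 1:2, 1] <- rbinom(N * 2, 1, 0.6)  # responses only where administered
    Response[, 3:4, 2] <- rbinom(N * 2, 1, 0.6)

    # Collapse the N x J x L response array to an N x J matrix,
    # mirroring the updated loop in pp_check.hmcdm()
    Y_collapsed <- matrix(NA, N, J)
    for (i in 1:N) {
      for (t in 1:L) {
        block_it <- which(!is.na(Design_array[i, , t]))
        Y_collapsed[i, block_it] <- Response[i, block_it, t]
      }
    }
    rowSums(Y_collapsed, na.rm = TRUE)  # per-subject total scores across both blocks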
