Commit

fix n_factors and styler
DominiqueMakowski committed Dec 23, 2018
1 parent 5ad67a4 commit 4f60a2b
Showing 154 changed files with 870 additions and 950 deletions.
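
Most of the 154 touched files are mechanical restyling rather than behavioural changes. For reference, a package-wide restyle with the styler package is usually produced by something like the call below; this is a minimal sketch assuming the default tidyverse style guide, since the exact invocation is not recorded in the commit.

# install.packages("styler")  # if needed
library(styler)

# Re-indents code and normalises argument spacing and assignment operators
# (e.g. 'values$CI = CI' becomes 'values$CI <- CI') across every .R file
# of the package -- the kind of change visible in the hunks below.
style_pkg()
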
1 change: 1 addition & 0 deletions NAMESPACE
@@ -14,6 +14,7 @@ S3method(analyze,lmerModLmerTest)
S3method(analyze,principal)
S3method(analyze,stanreg)
S3method(as.data.frame,density)
S3method(find_best_model,lavaan)
S3method(find_best_model,lmerModLmerTest)
S3method(find_best_model,stanreg)
S3method(find_combinations,formula)
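
The single added line registers an S3 method for lavaan objects, so the find_best_model() generic now dispatches on CFA/SEM fits. Below is a minimal sketch of what that enables, reusing the CFA model from the analyze.blavaan() example further down; calling the method with its defaults is an assumption, since the method body is not part of this diff.

library(psycho)
library(lavaan)

# CFA fit taken from the roxygen example in R/analyze.blavaan.R
model <- " visual =~ x1 + x2 + x3\ntextual =~ x4 + x5 + x6\nspeed =~ x7 + x8 + x9 "
fit <- lavaan::cfa(model, data = HolzingerSwineford1939)

# With S3method(find_best_model, lavaan) registered, the generic
# dispatches to find_best_model.lavaan().
find_best_model(fit)
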
28 changes: 13 additions & 15 deletions R/analyze.anova.R
@@ -11,30 +11,29 @@
#' @examples
#' \dontrun{
#' library(psycho)
#'
#'
#' df <- psycho::affective
#'
#'
#' x <- aov(df$Tolerating ~ df$Salary)
#' x <- aov(df$Tolerating ~ df$Salary * df$Sex)
#'
#'
#' x <- anova(lm(df$Tolerating ~ df$Salary * df$Sex))
#'
#'
#'
#'
#' summary(analyze(x))
#' print(analyze(x))
#'
#'
#' df <- psycho::emotion %>%
#' mutate(Recall = ifelse(Recall == TRUE, 1, 0)) %>%
#' group_by(Participant_ID, Emotion_Condition) %>%
#' summarise(Recall = sum(Recall) / n())
#'
#' x <- aov(Recall ~ Emotion_Condition + Error(Participant_ID), data=df)
#' x <- anova(lmerTest::lmer(Recall ~ Emotion_Condition + (1|Participant_ID), data=df))
#'
#' x <- aov(Recall ~ Emotion_Condition + Error(Participant_ID), data = df)
#' x <- anova(lmerTest::lmer(Recall ~ Emotion_Condition + (1 | Participant_ID), data = df))
#' analyze(x)
#' summary(x)
#' }
#'
#'
#'
#' @references
#' \itemize{
#' \item{Levine, T. R., & Hullett, C. R. (2002). Eta squared, partial eta squared, and misreporting of effect size in communication research. Human Communication Research, 28(4), 612-625.}
@@ -289,14 +288,13 @@ analyze.aovlist <- analyze.aov
#'
#' @examples
#' library(psycho)
#'
#'
#' df <- psycho::affective
#'
#'
#' x <- aov(df$Tolerating ~ df$Salary)
#' x <- aov(df$Tolerating ~ df$Salary * df$Sex)
#'
#'
#' omega_sq(x)
#'
#' @seealso http://stats.stackexchange.com/a/126520
#'
#' @author Arnoud Plantinga
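
Worth noting from the second hunk header above: analyze.aovlist is a plain alias of analyze.aov, so between-subjects and Error()-based within-subject fits share one code path. A short sketch built only from the roxygen examples in this file (an aov() call with an Error() term returns an object of class "aovlist"):

library(psycho)
library(dplyr)

# Between-subjects ANOVA -> class "aov"
df <- psycho::affective
fit_aov <- aov(df$Tolerating ~ df$Salary * df$Sex)

# Within-subject design with an Error() term -> class "aovlist"
df_rm <- psycho::emotion %>%
  mutate(Recall = ifelse(Recall == TRUE, 1, 0)) %>%
  group_by(Participant_ID, Emotion_Condition) %>%
  summarise(Recall = sum(Recall) / n())
fit_aovlist <- aov(Recall ~ Emotion_Condition + Error(Participant_ID), data = df_rm)

# Both objects end up in the same method because analyze.aovlist <- analyze.aov
analyze(fit_aov)
analyze(fit_aovlist)
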
127 changes: 68 additions & 59 deletions R/analyze.blavaan.R
@@ -12,16 +12,12 @@
#' @examples
#' library(psycho)
#' library(lavaan)
#'
#' model <- ' visual =~ x1 + x2 + x3
#' textual =~ x4 + x5 + x6
#' speed =~ x7 + x8 + x9 '
#' x <- lavaan::cfa(model, data=HolzingerSwineford1939)
#'
#'
#' model <- " visual =~ x1 + x2 + x3\ntextual =~ x4 + x5 + x6\nspeed =~ x7 + x8 + x9 "
#' x <- lavaan::cfa(model, data = HolzingerSwineford1939)
#'
#' rez <- analyze(x)
#' print(rez)
#'
#'
#' @author \href{https://dominiquemakowski.github.io/}{Dominique Makowski}
#'
#' @seealso
@@ -30,14 +26,14 @@
#' @importFrom lavaan parameterEstimates fitmeasures
#'
#' @export
analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
analyze.blavaan <- function(x, CI = 90, standardize = FALSE, ...) {
fit <- x


# Processing
# -------------
values <- list()
values$CI = CI
values$CI <- CI

# Fit measures
values$Fit_Measures <- interpret_lavaan(fit)
@@ -47,14 +43,16 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
# -------------
computations <- .get_info_computations(fit)
fitmeasures <- values$Fit_Measures$text
text <- paste0("A Bayesian model was fitted (",
computations,
"). The fit indices are as following: ",
fitmeasures)
text <- paste0(
"A Bayesian model was fitted (",
computations,
"). The fit indices are as following: ",
fitmeasures
)

# Summary
# -------------
summary <- .summary_blavaan(fit, CI=CI, standardize=standardize)
summary <- .summary_blavaan(fit, CI = CI, standardize = standardize)

# Plot
# -------------
@@ -74,30 +72,36 @@
#' @keywords internal
.get_info_computations <- function(fit) {
chains <- blavaan::blavInspect(fit, "n.chains")
sample = fit@external$sample
warmup = fit@external$burnin
text = paste0(chains,
" chains, each with iter = ",
sample,
"; warmup = ",
warmup)
sample <- fit@external$sample
warmup <- fit@external$burnin
text <- paste0(
chains,
" chains, each with iter = ",
sample,
"; warmup = ",
warmup
)
return(text)
}




#' @keywords internal
.process_blavaan <- function(fit, standardize=FALSE, CI=90){
.process_blavaan <- function(fit, standardize = FALSE, CI = 90) {
# Get relevant rows
PE <- parameterEstimates(fit, se = FALSE, ci=FALSE, remove.eq = FALSE, remove.system.eq = TRUE,
remove.ineq = FALSE, remove.def = FALSE,
add.attributes = TRUE)
if(!("group" %in% names(PE))) PE$group <- 1
PE <- parameterEstimates(fit,
se = FALSE, ci = FALSE, remove.eq = FALSE, remove.system.eq = TRUE,
remove.ineq = FALSE, remove.def = FALSE,
add.attributes = TRUE
)
if (!("group" %in% names(PE))) PE$group <- 1
newpt <- fit@ParTable
pte2 <- which(newpt$free > 0)
relevant_rows <- match(with(newpt, paste(lhs[pte2], op[pte2], rhs[pte2], group[pte2], sep="")),
paste(PE$lhs, PE$op, PE$rhs, PE$group, sep=""))
relevant_rows <- match(
with(newpt, paste(lhs[pte2], op[pte2], rhs[pte2], group[pte2], sep = "")),
paste(PE$lhs, PE$op, PE$rhs, PE$group, sep = "")
)

# Priors
priors <- rep(NA, nrow(PE))
@@ -108,12 +112,12 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {


# Posterior
if(standardize == FALSE){
if (standardize == FALSE) {
posteriors <- blavaan::blavInspect(fit, "draws") %>%
as.matrix() %>%
as.data.frame()
names(posteriors) <- names(lavaan::coef(fit))
} else{
} else {
posteriors <- blavaan::standardizedposterior(fit) %>%
as.data.frame()
}
@@ -127,7 +131,7 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
Effect <- c()
CI_lower <- c()
CI_higher <- c()
for(effect in names(posteriors)){
for (effect in names(posteriors)) {
posterior <- posteriors[[effect]]
Effect <- c(Effect, effect)
MPE <- c(MPE, mpe(posterior)$MPE)
@@ -137,10 +141,9 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
CI_values <- HDI(posterior, prob = CI / 100)
CI_lower <- c(CI_lower, CI_values$values$HDImin)
CI_higher <- c(CI_higher, CI_values$values$HDImax)

}

if(standardize == FALSE){
if (standardize == FALSE) {
Effects <- rep(NA, nrow(PE))
Effects[relevant_rows] <- Effect
MPEs <- rep(NA, nrow(PE))
@@ -153,7 +156,7 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
CI_lowers[relevant_rows] <- CI_lower
CI_highers <- rep(NA, nrow(PE))
CI_highers[relevant_rows] <- CI_higher
} else{
} else {
Effects <- Effect
MPEs <- MPE
Medians <- Median
@@ -162,45 +165,51 @@ analyze.blavaan <- function(x, CI=90, standardize=FALSE,...) {
CI_highers <- CI_higher
}

data <- data.frame("Effect" = Effects,
"Median" = Medians,
"MAD" = MADs,
"MPE" = MPEs,
"CI_lower" = CI_lowers,
"CI_higher" = CI_highers,
"Prior" = priors)
data <- data.frame(
"Effect" = Effects,
"Median" = Medians,
"MAD" = MADs,
"MPE" = MPEs,
"CI_lower" = CI_lowers,
"CI_higher" = CI_highers,
"Prior" = priors
)

return(data)
}



#' @keywords internal
.summary_blavaan <- function(fit, CI=90, standardize=FALSE){

solution <- lavaan::parameterEstimates(fit, se = TRUE, ci=TRUE, standardized=FALSE, level = CI/100)
.summary_blavaan <- function(fit, CI = 90, standardize = FALSE) {
solution <- lavaan::parameterEstimates(fit, se = TRUE, ci = TRUE, standardized = FALSE, level = CI / 100)

solution <- solution %>%
rename("From" = "rhs",
"To" = "lhs",
"Operator" = "op",
"Coef" = "est",
"SE" = "se",
"CI_lower" = "ci.lower",
"CI_higher" = "ci.upper") %>%
rename(
"From" = "rhs",
"To" = "lhs",
"Operator" = "op",
"Coef" = "est",
"SE" = "se",
"CI_lower" = "ci.lower",
"CI_higher" = "ci.upper"
) %>%
mutate(Type = dplyr::case_when(
Operator == "=~" ~ "Loading",
Operator == "~" ~ "Regression",
Operator == "~" ~ "Regression",
Operator == "~~" ~ "Correlation",
TRUE ~ NA_character_)) %>%
TRUE ~ NA_character_
)) %>%
select(one_of(c("To", "Operator", "From", "Type"))) %>%
mutate_("Effect" = "as.character(paste0(To, Operator, From))") %>%
full_join(.process_blavaan(fit, CI=CI, standardize=standardize) %>%
mutate_("Effect" = "as.character(Effect)"), by="Effect") %>%
full_join(.process_blavaan(fit, CI = CI, standardize = standardize) %>%
mutate_("Effect" = "as.character(Effect)"), by = "Effect") %>%
select_("-Effect") %>%
mutate_("Median" = "replace_na(Median, 1)",
"MAD" = "replace_na(MAD, 0)",
"MPE" = "replace_na(MPE, 100)") %>%
mutate_(
"Median" = "replace_na(Median, 1)",
"MAD" = "replace_na(MAD, 0)",
"MPE" = "replace_na(MPE, 100)"
) %>%
select(one_of(c("From", "Operator", "To", "Median", "MAD", "CI_lower", "CI_higher", "MPE", "Prior", "Type"))) %>%
dplyr::filter_("Operator != '~1'")

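
The core of .process_blavaan() is the per-parameter posterior summary: for each column of draws it stores the median, MAD, MPE and HDI bounds that end up in the summary table. Here is a minimal sketch of that computation on a single stand-in vector; the simulated draws are purely illustrative, while the mpe() and HDI() accessors mirror the ones used in the hunks above.

library(psycho)

# Stand-in for one column of blavaan::blavInspect(fit, "draws")
posterior <- rnorm(4000, mean = 0.4, sd = 0.1)

median(posterior)                       # reported as "Median"
mad(posterior)                          # reported as "MAD"
mpe(posterior)$MPE                      # Maximum Probability of Effect
ci <- HDI(posterior, prob = 90 / 100)   # CI = 90, as in the function default
ci$values$HDImin                        # reported as "CI_lower"
ci$values$HDImax                        # reported as "CI_higher"
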
34 changes: 12 additions & 22 deletions R/analyze.fa.R
@@ -12,15 +12,13 @@
#' @examples
#' library(psycho)
#' library(psych)
#'
#'
#' x <- psych::fa(psych::Thurstone.33, 2)
#'
#'
#' results <- analyze(x)
#' print(results)
#' summary(results)
#' plot(results)
#'
#'
#' @author \href{https://dominiquemakowski.github.io/}{Dominique Makowski}
#'
#' @export
@@ -109,11 +107,11 @@ analyze.fa <- function(x, labels = NULL, treshold = "max", ...) {
#' @examples
#' \dontrun{
#' library(psycho)
#'
#'
#' x <- psych::fa(psych::Thurstone.33, 2)
#' format_loadings(x)
#' }
#'
#'
#' @import dplyr
#' @export
format_loadings <- function(x, labels = NULL) {
@@ -163,12 +161,11 @@ format_loadings <- function(x, labels = NULL) {
#' @examples
#' \dontrun{
#' library(psycho)
#'
#'
#' x <- psych::fa(psych::Thurstone.33, 2)
#' get_loadings_max(format_loadings(x)$loadings)
#' }
#'
#'
#'
#' @import dplyr
#' @export
get_loadings_max <- function(loadings) {
@@ -194,14 +191,13 @@ get_loadings_max <- function(loadings) {
#' @examples
#' \dontrun{
#' library(psycho)
#'
#'
#' x <- psych::fa(psych::Thurstone.33, 2)
#' loadings <- format_loadings(x)$loadings
#' get_cfa_model(loadings, treshold="max")
#' get_cfa_model(loadings, treshold=0.1)
#' get_cfa_model(loadings, treshold = "max")
#' get_cfa_model(loadings, treshold = 0.1)
#' }
#'
#'
#'
#' @import dplyr
#' @export
get_cfa_model <- function(loadings, treshold = "max") {
@@ -240,12 +236,11 @@ get_cfa_model <- function(loadings, treshold = "max") {
#' @examples
#' \dontrun{
#' library(psycho)
#'
#'
#' x <- psych::fa(psych::Thurstone.33, 2)
#' plot_loadings(format_loadings(x)$loadings)
#' }
#'
#'
#'
#' @import dplyr
#' @export
plot_loadings <- function(loadings) {
@@ -265,8 +260,3 @@ plot_loadings <- function(loadings) {

return(p)
}
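
Chaining the documented pieces of this file gives the full exploratory-factor-analysis workflow. Every call below is taken from the roxygen examples above; treating the numeric cutoff 0.1 as an alternative to treshold = "max" follows the documented example.

library(psycho)
library(psych)

x <- psych::fa(psych::Thurstone.33, 2)

loadings <- format_loadings(x)$loadings
get_loadings_max(loadings)                  # maximum loading per item
get_cfa_model(loadings, treshold = "max")   # lavaan-style CFA model string
get_cfa_model(loadings, treshold = 0.1)     # or use a numeric cutoff, as documented
plot_loadings(loadings)

# Or let analyze() wrap the whole thing:
results <- analyze(x)
summary(results)
plot(results)
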




