# (source-listing metadata removed: 764 lines, 27 KiB, R)
# Mixed ANOVA Analysis for Domain Means
# EOHI Experiment Data Analysis - Domain Level Analysis
# Variables: NPast_mean_pref, NPast_mean_pers, NPast_mean_val, NPast_mean_life
#            NFut_mean_pref, NFut_mean_pers, NFut_mean_val, NFut_mean_life

library(tidyverse)
library(ez)
library(car)
library(nortest)    # Anderson-Darling normality test (ad.test)
library(emmeans)    # Estimated marginal means / post-hoc comparisons
library(purrr)      # map functions
library(effsize)    # Cohen's d calculations
library(effectsize) # Effect size calculations
library(ggplot2)    # Plotting

# Show full decimals instead of scientific notation
options(scipen = 999)

# Sum-to-zero contrasts so the Type III sums of squares below are meaningful
options(contrasts = c("contr.sum", "contr.poly"))

# NOTE(review): hard-coded working directory ties the script to one machine;
# an RStudio project or here::here() would be more portable
setwd("C:/Users/irina/Documents/DND/EOHI/eohi1")

# Read the data
data <- read.csv("exp1.csv")
# Wide-format columns to analyze: 2 TIME levels × 4 DOMAIN levels
required_vars <- c(
  "NPast_mean_pref", "NPast_mean_pers", "NPast_mean_val", "NPast_mean_life",
  "NFut_mean_pref", "NFut_mean_pers", "NFut_mean_val", "NFut_mean_life"
)

# Lookup table mapping each wide-format column to its TIME and DOMAIN level
domain_mapping <- data.frame(
  variable = required_vars,
  time = c(rep("Past", 4), rep("Future", 4)),
  domain = rep(c("Preferences", "Personality", "Values", "Life"), 2),
  stringsAsFactors = FALSE
)

# Reshape to long format: one row per participant × TIME × DOMAIN cell
long_data <- data %>%
  select(pID, ResponseId, TEMPORAL_DO, all_of(required_vars)) %>%
  pivot_longer(
    cols = all_of(required_vars),
    names_to = "variable",
    values_to = "MEAN_DIFFERENCE"
  ) %>%
  left_join(domain_mapping, by = "variable") %>%
  mutate(
    # Explicit factor levels fix display and contrast order
    # (the 'time' and 'domain' columns come from domain_mapping)
    pID = as.factor(pID),
    TEMPORAL_DO = as.factor(TEMPORAL_DO),
    TIME = factor(time, levels = c("Past", "Future")),
    DOMAIN = factor(domain, levels = c("Preferences", "Personality", "Values", "Life"))
  ) %>%
  select(pID, ResponseId, TEMPORAL_DO, TIME, DOMAIN, MEAN_DIFFERENCE) %>%
  # Drop cells without a response
  filter(!is.na(MEAN_DIFFERENCE))
# DESCRIPTIVE STATISTICS ----

# Round to five decimals for compact printing
r5 <- function(x) round(x, 5)

# Cell-level descriptives for the within-subject design (TIME × DOMAIN)
desc_stats <- long_data %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    n = n(),
    mean = r5(mean(MEAN_DIFFERENCE, na.rm = TRUE)),
    variance = r5(var(MEAN_DIFFERENCE, na.rm = TRUE)),
    sd = r5(sd(MEAN_DIFFERENCE, na.rm = TRUE)),
    median = r5(median(MEAN_DIFFERENCE, na.rm = TRUE)),
    q1 = r5(quantile(MEAN_DIFFERENCE, 0.25, na.rm = TRUE)),
    q3 = r5(quantile(MEAN_DIFFERENCE, 0.75, na.rm = TRUE)),
    min = r5(min(MEAN_DIFFERENCE, na.rm = TRUE)),
    max = r5(max(MEAN_DIFFERENCE, na.rm = TRUE)),
    .groups = 'drop'
  )

print(desc_stats)

# Same descriptives additionally split by the between-subjects factor
# (presentation order: TEMPORAL_DO)
desc_stats_by_temporal <- long_data %>%
  group_by(TEMPORAL_DO, TIME, DOMAIN) %>%
  summarise(
    n = n(),
    mean = r5(mean(MEAN_DIFFERENCE, na.rm = TRUE)),
    variance = r5(var(MEAN_DIFFERENCE, na.rm = TRUE)),
    sd = r5(sd(MEAN_DIFFERENCE, na.rm = TRUE)),
    .groups = 'drop'
  )

print(desc_stats_by_temporal)
# ASSUMPTION TESTING

# 1. Missing values check
# BUG FIX: the original computed missingness on long_data, which had already
# been filtered with filter(!is.na(MEAN_DIFFERENCE)), so n_missing was always
# zero and the check was vacuous. Compute missingness from the raw wide data.
missing_summary <- data %>%
  select(all_of(required_vars)) %>%
  pivot_longer(
    cols = all_of(required_vars),
    names_to = "variable",
    values_to = "MEAN_DIFFERENCE"
  ) %>%
  left_join(domain_mapping, by = "variable") %>%
  mutate(
    TIME = factor(time, levels = c("Past", "Future")),
    DOMAIN = factor(domain, levels = c("Preferences", "Personality", "Values", "Life"))
  ) %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    n_total = n(),
    n_missing = sum(is.na(MEAN_DIFFERENCE)),
    pct_missing = round(100 * n_missing / n_total, 2),
    .groups = 'drop'
  )

print(missing_summary)

# Create clean dataset (long_data is already filtered for NA values)
long_data_clean <- long_data
# 2. Outlier detection using Tukey's 1.5 × IQR fences per TIME × DOMAIN cell.
# Later summarise() expressions may reference earlier ones (q1, q3, iqr).
outlier_summary <- long_data_clean %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    n = n(),
    mean = mean(MEAN_DIFFERENCE),
    sd = sd(MEAN_DIFFERENCE),
    q1 = quantile(MEAN_DIFFERENCE, 0.25),
    median = median(MEAN_DIFFERENCE),
    q3 = quantile(MEAN_DIFFERENCE, 0.75),
    iqr = q3 - q1,
    lower_bound = q1 - 1.5 * iqr,
    upper_bound = q3 + 1.5 * iqr,
    # Count of observations beyond either fence
    n_outliers = sum(MEAN_DIFFERENCE < lower_bound | MEAN_DIFFERENCE > upper_bound),
    .groups = 'drop'
  )

print(outlier_summary)
# 3. Anderson-Darling normality test per TIME × DOMAIN cell.
# FIX: run ad.test() once per cell; the original called it twice per cell
# (once for the statistic, once for the p-value).
normality_results <- long_data_clean %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    n = n(),
    # Store the full htest object once, then unpack below
    ad = list(ad.test(MEAN_DIFFERENCE)),
    .groups = 'drop'
  ) %>%
  mutate(
    ad_statistic = map_dbl(ad, ~ unname(.x$statistic)),
    ad_p_value = map_dbl(ad, ~ .x$p.value)
  ) %>%
  select(-ad)

print(normality_results)
# 4. Homogeneity of variance (Levene's test).
# FIX: run leveneTest() once per group; the original called it twice per
# group (once for the F value, once for the p-value).

# Test homogeneity across TIME within each DOMAIN
homogeneity_time <- long_data_clean %>%
  group_by(DOMAIN) %>%
  summarise(
    levene = list(leveneTest(MEAN_DIFFERENCE ~ TIME)),
    .groups = 'drop'
  ) %>%
  mutate(
    levene_F = map_dbl(levene, ~ .x$`F value`[1]),
    levene_p = map_dbl(levene, ~ .x$`Pr(>F)`[1])
  ) %>%
  select(-levene)

print(homogeneity_time)

# Test homogeneity across DOMAIN within each TIME
homogeneity_domain <- long_data_clean %>%
  group_by(TIME) %>%
  summarise(
    levene = list(leveneTest(MEAN_DIFFERENCE ~ DOMAIN)),
    .groups = 'drop'
  ) %>%
  mutate(
    levene_F = map_dbl(levene, ~ .x$`F value`[1]),
    levene_p = map_dbl(levene, ~ .x$`Pr(>F)`[1])
  ) %>%
  select(-levene)

print(homogeneity_domain)
# HARTLEY'S F-MAX TEST WITH BOOTSTRAP CRITICAL VALUES

# Hartley's F-max ratio: the largest group variance divided by the smallest.
# NAs in `variances` are ignored.
calculate_hartley_ratio <- function(variances) {
  rng <- range(variances, na.rm = TRUE)
  rng[2] / rng[1]
}
# For the mixed ANOVA we test homogeneity across the BETWEEN-SUBJECTS factor
# (TEMPORAL_DO) within each combination of the within-subjects factors
# (TIME × DOMAIN).

# Sanity check on the between-subjects factor levels and cell counts
print(unique(long_data_clean$TEMPORAL_DO))
print(table(long_data_clean$TEMPORAL_DO))

# Observed F-max ratio per TIME × DOMAIN cell: variance of the "01PAST"
# order group against variance of the "02FUT" order group
observed_temporal_ratios <- long_data_clean %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    past_var = var(MEAN_DIFFERENCE[TEMPORAL_DO == "01PAST"], na.rm = TRUE),
    fut_var = var(MEAN_DIFFERENCE[TEMPORAL_DO == "02FUT"], na.rm = TRUE),
    f_max_ratio = max(past_var, fut_var) / min(past_var, fut_var),
    .groups = 'drop'
  ) %>%
  select(TIME, DOMAIN, past_var, fut_var, f_max_ratio)

print(observed_temporal_ratios)
# Bootstrap critical values for Hartley's F-max test.
#
# For each level of `group_var`, resample the response with replacement and
# compute the F-max ratio (max group variance / min group variance) of each
# bootstrap draw; the 95th percentile of that null distribution is the
# critical value the observed ratio is compared against.
#
# Args:
#   data:         data.frame or tibble holding the grouping and response columns
#   group_var:    name (string) of the grouping column
#   response_var: name (string) of the response column
#   n_iter:       number of bootstrap iterations (default 1000)
#
# Returns a list: observed_ratio, critical_95, n_valid_iterations.
bootstrap_hartley_critical <- function(data, group_var, response_var, n_iter = 1000) {
  # Unique groups present in the data
  groups <- unique(data[[group_var]])

  # Observed variance per group (base R; avoids tidy-eval machinery)
  observed_vars <- vapply(
    groups,
    function(g) var(data[[response_var]][data[[group_var]] == g], na.rm = TRUE),
    numeric(1)
  )

  # Guard against zero/undefined variances so ratios stay finite
  bad <- is.na(observed_vars) | observed_vars <= 0
  observed_vars[bad] <- 1e-10

  # Observed F-max ratio
  observed_ratio <- max(observed_vars) / min(observed_vars)

  # FIX: extract the response as a plain vector. The original used
  # data[rows, response_var], which returns a one-column tibble when `data`
  # is a tibble (as it is when called via pick()), so the bootstrap drew
  # samples from a data frame rather than from a numeric vector.
  group_data_list <- lapply(groups, function(g) {
    vals <- data[[response_var]][data[[group_var]] == g]
    vals[!is.na(vals)]
  })

  # Bootstrap with pre-allocated storage
  bootstrap_ratios <- numeric(n_iter)
  for (i in seq_len(n_iter)) {
    # Resample each group independently and take the variance ratio
    sample_vars <- vapply(group_data_list, function(vals) {
      var(sample(vals, size = length(vals), replace = TRUE), na.rm = TRUE)
    }, numeric(1))
    bootstrap_ratios[i] <- max(sample_vars) / min(sample_vars)
  }

  # Drop non-finite ratios (e.g. a resample with zero variance)
  valid_ratios <- bootstrap_ratios[is.finite(bootstrap_ratios) & !is.na(bootstrap_ratios)]

  if (length(valid_ratios) == 0) {
    stop("No valid bootstrap ratios generated")
  }

  # Critical value: 95th percentile of the bootstrap distribution
  critical_95 <- quantile(valid_ratios, 0.95, na.rm = TRUE)

  list(
    observed_ratio = observed_ratio,
    critical_95 = critical_95,
    n_valid_iterations = length(valid_ratios)
  )
}
# Hartley's F-max test across TEMPORAL_DO within each TIME × DOMAIN cell
set.seed(123) # For reproducibility

hartley_temporal_results <- long_data_clean %>%
  group_by(TIME, DOMAIN) %>%
  summarise(
    # One bootstrap run per cell; pick() passes just the two needed columns
    hartley_result = list(
      bootstrap_hartley_critical(pick(TEMPORAL_DO, MEAN_DIFFERENCE),
                                 "TEMPORAL_DO", "MEAN_DIFFERENCE")
    ),
    .groups = 'drop'
  ) %>%
  mutate(
    # Pluck the scalar results out of each list element by name
    observed_ratio = map_dbl(hartley_result, "observed_ratio"),
    critical_95 = map_dbl(hartley_result, "critical_95"),
    significant = observed_ratio > critical_95
  ) %>%
  select(TIME, DOMAIN, observed_ratio, critical_95, significant)

print(hartley_temporal_results)
# MIXED ANOVA ANALYSIS

# Check for complete cases
complete_cases <- sum(complete.cases(long_data_clean))
print(complete_cases)

# Check if the design is balanced and complete.
# BUG FIX: the original condition all(design_balance %in% c(0, 1)) accepted
# cells with zero observations while printing that every participant had
# data for all TIME × DOMAIN combinations. A complete balanced design needs
# exactly one observation per participant per cell.
design_balance <- table(long_data_clean$pID, long_data_clean$TIME, long_data_clean$DOMAIN)
if(all(design_balance == 1)) {
  print("Design is balanced: each participant has data for all TIME × DOMAIN combinations")
} else {
  print("Warning: Design is unbalanced")
  print(summary(as.vector(design_balance)))
}
# MIXED ANOVA WITH SPHERICITY CORRECTIONS
#
# Between-subjects: TEMPORAL_DO (2 levels: 01PAST, 02FUT)
# Within-subjects:  TIME (2 levels: Past, Future) ×
#                   DOMAIN (4 levels: Preferences, Personality, Values, Life)
# ezANOVA applies sphericity corrections automatically when detailed = TRUE.
mixed_anova_model <- ezANOVA(
  data = long_data_clean,
  dv = MEAN_DIFFERENCE,
  wid = pID,
  between = TEMPORAL_DO,
  within = .(TIME, DOMAIN),
  type = 3,
  detailed = TRUE
)

# Main ANOVA table with sequential row numbers for printing
anova_output <- mixed_anova_model$ANOVA
rownames(anova_output) <- NULL
print(anova_output)

# Mauchly's test for sphericity
print(mixed_anova_model$Mauchly)
# Show sphericity-corrected results (Greenhouse-Geisser and Huynh-Feldt)
if(!is.null(mixed_anova_model$`Sphericity Corrections`)) {
  print(mixed_anova_model$`Sphericity Corrections`)

  # Extract and display corrected degrees of freedom
  sphericity_corr <- mixed_anova_model$`Sphericity Corrections`
  anova_table <- mixed_anova_model$ANOVA

  # FIX: hoist the effect lookup — the original recomputed
  # match(sphericity_corr$Effect, anova_table$Effect) six times below.
  effect_idx <- match(sphericity_corr$Effect, anova_table$Effect)

  # Corrected df = original df × epsilon (per correction method)
  corrected_df <- data.frame(
    Effect = sphericity_corr$Effect,
    Original_DFn = anova_table$DFn[effect_idx],
    Original_DFd = anova_table$DFd[effect_idx],
    GG_DFn = anova_table$DFn[effect_idx] * sphericity_corr$GGe,
    GG_DFd = anova_table$DFd[effect_idx] * sphericity_corr$GGe,
    HF_DFn = anova_table$DFn[effect_idx] * sphericity_corr$HFe,
    HF_DFd = anova_table$DFd[effect_idx] * sphericity_corr$HFe,
    GG_epsilon = sphericity_corr$GGe,
    HF_epsilon = sphericity_corr$HFe
  )

  print(corrected_df)

  # Between-subjects effects (no sphericity corrections needed)
  between_effects <- c("TEMPORAL_DO")
  for(effect in between_effects) {
    if(effect %in% anova_table$Effect) {
      f_value <- anova_table$F[anova_table$Effect == effect]
      dfn <- anova_table$DFn[anova_table$Effect == effect]
      dfd <- anova_table$DFd[anova_table$Effect == effect]
      p_value <- anova_table$p[anova_table$Effect == effect]

      cat(sprintf("%s: F(%d, %d) = %.3f, p = %.6f\n", effect, dfn, dfd, f_value, p_value))
    }
  }

  # Within-subjects effects (sphericity corrections where applicable)

  # TIME main effect (2 levels, sphericity automatically satisfied)
  if("TIME" %in% anova_table$Effect) {
    f_value <- anova_table$F[anova_table$Effect == "TIME"]
    dfn <- anova_table$DFn[anova_table$Effect == "TIME"]
    dfd <- anova_table$DFd[anova_table$Effect == "TIME"]
    p_value <- anova_table$p[anova_table$Effect == "TIME"]
    cat(sprintf("TIME: F(%d, %d) = %.3f, p = %.6f (2 levels, sphericity satisfied)\n", dfn, dfd, f_value, p_value))
  }

  # DOMAIN main effect (4 levels, needs sphericity correction)
  if("DOMAIN" %in% anova_table$Effect) {
    f_value <- anova_table$F[anova_table$Effect == "DOMAIN"]
    dfn <- anova_table$DFn[anova_table$Effect == "DOMAIN"]
    dfd <- anova_table$DFd[anova_table$Effect == "DOMAIN"]
    p_value <- anova_table$p[anova_table$Effect == "DOMAIN"]
    cat(sprintf("DOMAIN: F(%d, %d) = %.3f, p = %.6f\n", dfn, dfd, f_value, p_value))
  }

  # Effects listed in the corrections table: report uncorrected, GG- and
  # HF-corrected F statistics side by side
  for(i in seq_len(nrow(corrected_df))) {
    effect <- corrected_df$Effect[i]
    f_value <- anova_table$F[match(effect, anova_table$Effect)]

    cat(sprintf("\n%s:\n", effect))
    cat(sprintf(" Original: F(%d, %d) = %.3f\n",
                corrected_df$Original_DFn[i], corrected_df$Original_DFd[i], f_value))
    cat(sprintf(" GG-corrected: F(%.2f, %.2f) = %.3f, p = %.6f\n",
                corrected_df$GG_DFn[i], corrected_df$GG_DFd[i], f_value, sphericity_corr$`p[GG]`[i]))
    cat(sprintf(" HF-corrected: F(%.2f, %.2f) = %.3f, p = %.6f\n",
                corrected_df$HF_DFn[i], corrected_df$HF_DFd[i], f_value, sphericity_corr$`p[HF]`[i]))
  }
} else {
  # FIX: print() shows "\n" literally; cat() emits an actual newline
  cat("\nNote: Sphericity corrections not needed (sphericity assumption met)\n")
}
# COHEN'S D FOR MAIN EFFECTS

# Companion aov() model (same design as the ezANOVA call) so emmeans can
# compute estimated marginal means
aov_model <- aov(MEAN_DIFFERENCE ~ TEMPORAL_DO * TIME * DOMAIN + Error(pID/(TIME * DOMAIN)),
                 data = long_data_clean)

# Main effect of TIME
time_emmeans <- emmeans(aov_model, ~ TIME)
print(time_emmeans)

time_main_contrast <- pairs(time_emmeans, adjust = "Bonferroni")
time_main_df <- as.data.frame(time_main_contrast)
print(time_main_df)

# Cohen's d for the TIME main effect, computed from the raw observations
# (pooled across the other factors)
if(nrow(time_main_df) > 0) {
  cat("\nCohen's d for TIME main effect:\n")
  time_past_data <- long_data_clean$MEAN_DIFFERENCE[long_data_clean$TIME == "Past"]
  time_future_data <- long_data_clean$MEAN_DIFFERENCE[long_data_clean$TIME == "Future"]

  time_cohens_d <- cohen.d(time_past_data, time_future_data)
  cat(sprintf("Past vs Future: n1 = %d, n2 = %d\n", length(time_past_data), length(time_future_data)))
  cat(sprintf("Cohen's d: %.5f\n", time_cohens_d$estimate))
  cat(sprintf("Effect size interpretation: %s\n", time_cohens_d$magnitude))
  cat(sprintf("p-value: %.5f\n", time_main_df$p.value[1]))
}
# Main Effect of DOMAIN (significant: p < 0.001)
domain_emmeans <- emmeans(aov_model, ~ DOMAIN)
print(domain_emmeans)
domain_main_contrast <- pairs(domain_emmeans, adjust = "bonferroni")
domain_main_df <- as.data.frame(domain_main_contrast)
print(domain_main_df)

# Cohen's d for each significant DOMAIN contrast, from raw observations
significant_domain <- domain_main_df[domain_main_df$p.value < 0.05, ]
if(nrow(significant_domain) > 0) {
  cat("\nCohen's d for significant DOMAIN contrasts:\n")
  for(idx in seq_len(nrow(significant_domain))) {
    contrast_name <- as.character(significant_domain$contrast[idx])
    # emmeans labels contrasts as "LevelA - LevelB"
    contrast_parts <- trimws(strsplit(contrast_name, " - ")[[1]])
    if(length(contrast_parts) != 2) next

    data1 <- long_data_clean$MEAN_DIFFERENCE[long_data_clean$DOMAIN == contrast_parts[1]]
    data2 <- long_data_clean$MEAN_DIFFERENCE[long_data_clean$DOMAIN == contrast_parts[2]]
    if(length(data1) == 0 || length(data2) == 0) next

    domain_cohens_d <- cohen.d(data1, data2)
    cat(sprintf("Comparison: %s\n", contrast_name))
    cat(sprintf(" n1 = %d, n2 = %d\n", length(data1), length(data2)))
    cat(sprintf(" Cohen's d: %.5f\n", domain_cohens_d$estimate))
    cat(sprintf(" Effect size interpretation: %s\n", domain_cohens_d$magnitude))
    cat(sprintf(" p-value: %.5f\n", significant_domain$p.value[idx]))
    cat("\n")
  }
}
# SIMPLE EFFECTS AND PAIRWISE COMPARISONS FOR INTERACTIONS

# 1. TEMPORAL_DO × TIME interaction: simple effects of TIME within each
#    level of TEMPORAL_DO
temporal_time_emmeans <- emmeans(aov_model, ~ TIME | TEMPORAL_DO)
temporal_time_simple <- pairs(temporal_time_emmeans, adjust = "bonferroni")
print(temporal_time_simple)

# 2. TIME × DOMAIN interaction: simple effects of DOMAIN within each TIME
time_domain_emmeans <- emmeans(aov_model, ~ DOMAIN | TIME)
time_domain_simple <- pairs(time_domain_emmeans, adjust = "bonferroni")
print(time_domain_simple)
# COHEN'S D FOR SIGNIFICANT TWO-WAY INTERACTIONS

# Print Cohen's d (from the raw data) for every significant pairwise
# contrast in an emmeans pairs() table.
#
# Args:
#   pairs_df:     data.frame of pairwise contrasts (needs `contrast` and
#                 `p.value`; may also carry a conditioning column)
#   data:         raw long-format data
#   group1_var:   column whose levels appear in the contrast labels
#   group2_var:   conditioning column of the simple effect; if present as a
#                 column of pairs_df, comparisons are restricted to its level
#   response_var: response column used for the effect size
#
# Side effects only (cat/print); returns nothing useful.
calculate_cohens_d_for_pairs <- function(pairs_df, data, group1_var, group2_var, response_var) {
  significant_pairs <- pairs_df[pairs_df$p.value < 0.05, ]

  # Early exit when nothing is significant
  if(nrow(significant_pairs) == 0) {
    cat("No significant pairwise comparisons found.\n")
    return(invisible(NULL))
  }

  cat("Significant pairwise comparisons (p < 0.05):\n")
  print(significant_pairs)

  cat("\nCohen's d calculated from raw data:\n")

  # Whether the pairs table carries the conditioning factor of a simple effect
  has_condition <- group2_var %in% colnames(significant_pairs)

  for(row_i in seq_len(nrow(significant_pairs))) {
    row <- significant_pairs[row_i, ]
    contrast_name <- as.character(row$contrast)

    # Contrast labels look like "LevelA - LevelB"
    levels_pair <- trimws(strsplit(contrast_name, " - ")[[1]])
    if(length(levels_pair) != 2) next

    if(has_condition) {
      # Simple effect: restrict both sides to this row's conditioning level
      cond_level <- as.character(row[[group2_var]])
      in_cond <- data[[group2_var]] == cond_level
      data1 <- data[[response_var]][data[[group1_var]] == levels_pair[1] & in_cond]
      data2 <- data[[response_var]][data[[group1_var]] == levels_pair[2] & in_cond]
    } else {
      data1 <- data[[response_var]][data[[group1_var]] == levels_pair[1]]
      data2 <- data[[response_var]][data[[group1_var]] == levels_pair[2]]
    }

    if(length(data1) == 0 || length(data2) == 0) next

    # Cohen's d via the effsize package
    cohens_d_result <- cohen.d(data1, data2)

    cat(sprintf("Comparison: %s", contrast_name))
    if(has_condition) {
      cat(sprintf(" | %s", cond_level))
    }
    cat(sprintf("\n n1 = %d, n2 = %d\n", length(data1), length(data2)))
    cat(sprintf(" Cohen's d: %.5f\n", cohens_d_result$estimate))
    cat(sprintf(" Effect size interpretation: %s\n", cohens_d_result$magnitude))
    cat(sprintf(" p-value: %.5f\n", row$p.value))
    cat("\n")
  }
}
# Apply the Cohen's d helper to each two-way interaction

# 1. TEMPORAL_DO × TIME interaction: simple effects of TIME within each
#    TEMPORAL_DO
temporal_time_simple_df <- as.data.frame(temporal_time_simple)
calculate_cohens_d_for_pairs(temporal_time_simple_df, long_data_clean, "TIME", "TEMPORAL_DO", "MEAN_DIFFERENCE")

# 2. TIME × DOMAIN interaction: simple effects of DOMAIN within each TIME
time_domain_simple_df <- as.data.frame(time_domain_simple)
calculate_cohens_d_for_pairs(time_domain_simple_df, long_data_clean, "DOMAIN", "TIME", "MEAN_DIFFERENCE")
# INTERACTION PLOTS

# Colorblind-friendly palettes (IBM palette hex codes)
time_colors <- setNames(c("#648FFF", "#DC267F"), c("Past", "Future"))
domain_colors <- setNames(
  c("#648FFF", "#DC267F", "#FFB000", "#FE6100"),
  c("Preferences", "Personality", "Values", "Life")
)

# TIME levels in display order (Past, then Future)
time_levels <- c("Past", "Future")
# ============================================================
# PLOT 1: TEMPORAL_DO × TIME INTERACTION
# ============================================================

# Estimated marginal means for TEMPORAL_DO × TIME
emm_temporal_time <- emmeans(aov_model, ~ TEMPORAL_DO * TIME)

# Emmeans as a plotting data frame; drop rows with incomplete estimates/CIs
emmeans_temporal_time <- emm_temporal_time %>%
  as.data.frame() %>%
  filter(!is.na(lower.CL) & !is.na(upper.CL) & !is.na(emmean)) %>%
  rename(
    ci_lower = lower.CL,
    ci_upper = upper.CL,
    plot_mean = emmean
  ) %>%
  mutate(
    TEMPORAL_DO = factor(TEMPORAL_DO, levels = c("01PAST", "02FUT")),
    TIME = factor(TIME, levels = time_levels)
  )

# Raw data with manually dodged x positions: each TIME level is offset
# around its TEMPORAL_DO position by ±dodge_width/2
dodge_width_temporal <- 0.5
iPlot_temporal <- long_data_clean %>%
  dplyr::select(pID, TEMPORAL_DO, TIME, MEAN_DIFFERENCE) %>%
  mutate(
    TEMPORAL_DO = factor(TEMPORAL_DO, levels = c("01PAST", "02FUT")),
    TIME = factor(TIME, levels = time_levels),
    x_pos = as.numeric(TEMPORAL_DO),
    time_offset = (as.numeric(TIME) - 1.5) * dodge_width_temporal,
    x_dodged = x_pos + time_offset
  )

# Apply the same dodged positions to the emmeans overlay
emmeans_temporal_time <- emmeans_temporal_time %>%
  mutate(
    x_pos = as.numeric(TEMPORAL_DO),
    time_offset = (as.numeric(TIME) - 1.5) * dodge_width_temporal,
    x_dodged = x_pos + time_offset
  )
# Create TEMPORAL_DO × TIME interaction plot: raw-data violins with the
# estimated marginal means (point + CI from emmeans) overlaid, using the
# manually dodged x positions computed above
interaction_plot_temporal <- ggplot() +
  # Raw distribution per TEMPORAL_DO × TIME cell
  geom_violin(
    data = iPlot_temporal,
    aes(x = x_dodged, y = MEAN_DIFFERENCE, fill = TIME, group = interaction(x_pos, TIME)),
    alpha = 0.4,
    color = NA,
    trim = FALSE,
    scale = "width",
    width = dodge_width_temporal / 2
  ) +
  # Confidence intervals of the estimated marginal means
  geom_errorbar(
    data = emmeans_temporal_time,
    aes(x = x_dodged, ymin = ci_lower, ymax = ci_upper),
    width = 0.08,
    linewidth = 0.8,
    color = "black"
  ) +
  # Estimated marginal means as filled shapes
  geom_point(
    data = emmeans_temporal_time,
    aes(x = x_dodged, y = plot_mean, fill = TIME, shape = TIME),
    size = 4,
    stroke = 1,
    color = "black"
  ) +
  labs(
    x = "Order",
    y = "Mean absolute difference from present",
    title = "TEMPORAL_DO × TIME Interaction"
  ) +
  # Continuous x scale because positions were dodged manually;
  # breaks correspond to the two TEMPORAL_DO levels
  scale_x_continuous(
    breaks = c(1, 2),
    labels = c("Past First", "Future First"),
    limits = c(0.5, 2.5)
  ) +
  scale_fill_manual(name = "TIME", values = time_colors) +
  # Shapes 21/22 are fillable (circle, square)
  scale_shape_manual(name = "TIME", values = c(21, 22)) +
  theme_minimal(base_size = 13) +
  theme(
    axis.text = element_text(size = 11),
    axis.title = element_text(size = 12),
    plot.title = element_text(size = 14, hjust = 0.5),
    legend.position = "right",
    legend.title = element_text(size = 11),
    legend.text = element_text(size = 10),
    panel.grid.major.x = element_blank(),
    panel.grid.minor = element_blank(),
    panel.border = element_rect(color = "gray80", fill = NA, linewidth = 0.5)
  )

print(interaction_plot_temporal)
# ============================================================
# PLOT 2: TIME × DOMAIN INTERACTION (DOMAIN on x-axis)
# ============================================================

# Estimated marginal means for TIME × DOMAIN
emm_time_domain <- emmeans(aov_model, ~ TIME * DOMAIN)

# Emmeans as a plotting data frame; drop rows with incomplete estimates/CIs
emmeans_time_domain <- emm_time_domain %>%
  as.data.frame() %>%
  filter(!is.na(lower.CL) & !is.na(upper.CL) & !is.na(emmean)) %>%
  rename(
    ci_lower = lower.CL,
    ci_upper = upper.CL,
    plot_mean = emmean
  ) %>%
  mutate(
    DOMAIN = factor(DOMAIN, levels = c("Preferences", "Personality", "Values", "Life")),
    TIME = factor(TIME, levels = time_levels)
  )

# Raw data with manually dodged x positions: each TIME level is offset
# around its DOMAIN position by ±dodge_width/2
dodge_width_domain <- 0.5
iPlot_domain <- long_data_clean %>%
  dplyr::select(pID, DOMAIN, TIME, MEAN_DIFFERENCE) %>%
  mutate(
    DOMAIN = factor(DOMAIN, levels = c("Preferences", "Personality", "Values", "Life")),
    TIME = factor(TIME, levels = time_levels),
    x_pos = as.numeric(DOMAIN),
    time_offset = (as.numeric(TIME) - 1.5) * dodge_width_domain,
    x_dodged = x_pos + time_offset
  )

# Apply the same dodged positions to the emmeans overlay
emmeans_time_domain <- emmeans_time_domain %>%
  mutate(
    x_pos = as.numeric(DOMAIN),
    time_offset = (as.numeric(TIME) - 1.5) * dodge_width_domain,
    x_dodged = x_pos + time_offset
  )
# Create TIME × DOMAIN interaction plot: raw-data violins with the
# estimated marginal means (point + CI from emmeans) overlaid, DOMAIN on
# the x-axis and TIME dodged around each domain position
interaction_plot_domain <- ggplot() +
  # Raw distribution per DOMAIN × TIME cell
  geom_violin(
    data = iPlot_domain,
    aes(x = x_dodged, y = MEAN_DIFFERENCE, fill = TIME, group = interaction(x_pos, TIME)),
    alpha = 0.4,
    color = NA,
    trim = FALSE,
    scale = "width",
    width = dodge_width_domain / 2
  ) +
  # Confidence intervals of the estimated marginal means
  geom_errorbar(
    data = emmeans_time_domain,
    aes(x = x_dodged, ymin = ci_lower, ymax = ci_upper),
    width = 0.08,
    linewidth = 0.8,
    color = "black"
  ) +
  # Estimated marginal means as filled shapes
  geom_point(
    data = emmeans_time_domain,
    aes(x = x_dodged, y = plot_mean, fill = TIME, shape = TIME),
    size = 4,
    stroke = 1,
    color = "black"
  ) +
  labs(
    x = "Domain",
    y = "Mean absolute difference from present",
    title = "TIME × DOMAIN Interaction"
  ) +
  # Continuous x scale because positions were dodged manually;
  # breaks correspond to the four DOMAIN levels
  scale_x_continuous(
    breaks = c(1, 2, 3, 4),
    labels = c("Preferences", "Personality", "Values", "Life"),
    limits = c(0.5, 4.5)
  ) +
  scale_fill_manual(name = "TIME", values = time_colors) +
  # Shapes 21/22 are fillable (circle, square)
  scale_shape_manual(name = "TIME", values = c(21, 22)) +
  theme_minimal(base_size = 13) +
  theme(
    axis.text = element_text(size = 11),
    axis.text.x = element_text(angle = 45, hjust = 1),
    axis.title = element_text(size = 12),
    plot.title = element_text(size = 14, hjust = 0.5),
    legend.position = "right",
    legend.title = element_text(size = 11),
    legend.text = element_text(size = 10),
    panel.grid.major.x = element_blank(),
    panel.grid.minor = element_blank(),
    panel.border = element_rect(color = "gray80", fill = NA, linewidth = 0.5)
  )

print(interaction_plot_domain)
|
||
# ============================================================
# PLOT 3: TEMPORAL_DO × TIME INTERACTION (Emmeans only)
# ============================================================

# Reuse the emmeans data frame from Plot 1, with a tighter dodge (0.3)
emmeans_temporal_time_simple <- emmeans_temporal_time %>%
  mutate(
    # Human-readable order labels; not referenced by the plot below (it
    # labels the x axis directly) but kept for interactive inspection
    TEMPORAL_DO_label = factor(
      TEMPORAL_DO,
      levels = c("01PAST", "02FUT"),
      labels = c("Past First", "Future First")
    ),
    x_pos = as.numeric(TEMPORAL_DO),
    time_offset = (as.numeric(TIME) - 1.5) * 0.3,
    x_dodged = x_pos + time_offset
  )
# Create simple emmeans-only plot: estimated marginal means with CIs for
# TEMPORAL_DO × TIME, without the raw-data violins
interaction_plot_emmeans_only <- ggplot(emmeans_temporal_time_simple) +
  # Confidence intervals, colored by TIME
  geom_errorbar(
    aes(x = x_dodged, ymin = ci_lower, ymax = ci_upper, color = TIME),
    width = 0.1,
    linewidth = 1,
    alpha = 0.8
  ) +
  # Estimated marginal means as filled shapes
  geom_point(
    aes(x = x_dodged, y = plot_mean, fill = TIME, shape = TIME),
    size = 5,
    stroke = 1.2,
    color = "black"
  ) +
  labs(
    x = "Order",
    y = "Mean absolute difference from present",
    title = "TEMPORAL_DO × TIME Interaction (Estimated Marginal Means)"
  ) +
  # Continuous x scale because positions were dodged manually;
  # breaks correspond to the two TEMPORAL_DO levels
  scale_x_continuous(
    breaks = c(1, 2),
    labels = c("Past First", "Future First"),
    limits = c(0.5, 2.5)
  ) +
  scale_color_manual(name = "TIME", values = time_colors) +
  scale_fill_manual(name = "TIME", values = time_colors) +
  # Shapes 21/22 are fillable (circle, square)
  scale_shape_manual(name = "TIME", values = c(21, 22)) +
  theme_minimal(base_size = 13) +
  theme(
    axis.text = element_text(size = 11),
    axis.title = element_text(size = 12),
    plot.title = element_text(size = 14, hjust = 0.5),
    legend.position = "right",
    legend.title = element_text(size = 11),
    legend.text = element_text(size = 10),
    panel.grid.major.x = element_blank(),
    panel.grid.minor = element_blank(),
    panel.border = element_rect(color = "gray80", fill = NA, linewidth = 0.5)
  )

print(interaction_plot_emmeans_only)