remove(list = ls())
<- c("#78D9C5","#F5BE5E","#EEB6E9","#DBDB73","#FFED98","#BFD2EB") palette
#source raw data directory: data_raw and data included
source("../../../isolation_trajectories_data_path.R")
library(MplusAutomation)
library(stringr)
library(stringi)
library(filesstrings)
library(texreg)
library(relimp)
library(rhdf5)
library(knitr)
library(ggplot2)
library(tidyverse)
library(plyr) #conflicts with tidyverse for e.g. rename and row_number
This page displays the results of the 3-step sensitivity analysis used to assess how social isolation trajectories are associated with antecedents and outcomes while accounting for classification error in trajectory class assignment. Accounting for classification error made very little difference (the results were replicated), so the hard-classification results are presented in the manuscript.
To keep the following code readable, I have created this function:
add.significance.variable <- function(data){
  data <- data %>%
    mutate(
      Significance =
        if_else(
          p < 0.05,
          1,
          0
        ) %>%
        recode_factor(
          "0" = "Non-significant",
          "1" = "Significant"
        ))
  return(data)
}
In Mplus, to assess the associations with antecedents, I manually added syntax fixing the logits for the classification probabilities taken from the original GMM:
%OVERALL%
C ON seswq35 P5CACORNCategoryrecoded vndngdm5 socprbm5 nchildren00e5
schmeals00e5 harm3em5recoded tsibl5 tssupm5 actvm5 nobiodl5 anyviom5 warme5 fdepmm5
bfiom5 bficm5 bfiem5 bfiam5 bfinm5 antisocialparent alcoholismparent iqe5 exfunce5
tomtote5 totexte5 intisoe5 totadde5 totproe5 ;
%C#1%
[class#1@5.092]; [class#2@-0.194];
%C#2%
[class#1@2.193]; [class#2@4.204];
%C#3%
[class#1@-2.233]; [class#2@-5.270];
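The fixed values above come from the "Logits for the Classification Probabilities for the Most Likely Latent Class Membership" table in the original GMM output. As a rough sketch of how they could be pulled into R rather than copied by hand - assuming the original GMM .out path is stored in a (hypothetical) object called mplus_GMM_clustered_full_output, and that your version of MplusAutomation parses the logit table into class_counts$logitProbs.mostLikely:
# Sketch only: extract the classification logits from the original GMM output.
# mplus_GMM_clustered_full_output is a placeholder path object, and the
# logitProbs.mostLikely element name should be checked against your version of MplusAutomation.
original.gmm <- readModels(target = mplus_GMM_clustered_full_output)

# logits for the classification probabilities for the most likely latent class
# membership - these are the values fixed with @ in the %C#k% statements above
original.gmm$class_counts$logitProbs.mostLikely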
Note: reading the data into the Rmd does not provide the right information for the antecedents - you need to look at the Mplus .out file, as only the unstandardised parameterisations can be accessed in R. To find the results, go to the section "ODDS RATIO FOR THE ALTERNATIVE PARAMETERIZATIONS FOR THE CATEGORICAL LATENT VARIABLE REGRESSION: Parameterization using Reference Class 1". For now I have manually copied over the required table.
ODDS RATIO FOR THE ALTERNATIVE PARAMETERIZATIONS FOR THE CATEGORICAL LATENT VARIABLE REGRESSION
Estimate   S.E.   (Est. - 1)/S.E.   Two-Tailed P-Value
Parameterization using Reference Class 1
C#2 ON
SESWQ35 0.886 0.247 -0.463 0.643
P5CACORNCA 1.199 0.477 0.418 0.676
VNDNGDM5 0.855 0.102 -1.417 0.157
SOCPRBM5 1.139 0.144 0.965 0.335
NCHILDREN0 1.000 0.001 0.315 0.753
SCHMEALS00 1.000 0.015 -0.028 0.978
HARM3EM5RE 1.160 0.526 0.304 0.761
TSIBL5 0.848 0.128 -1.185 0.236
TSSUPM5 0.981 0.026 -0.729 0.466
ACTVM5 0.867 0.076 -1.746 0.081
NOBIODL5 1.241 0.670 0.359 0.719
ANYVIOM5 0.742 0.271 -0.950 0.342
WARME5 0.981 0.160 -0.119 0.906
FDEPMM5 1.093 0.425 0.220 0.826
BFIOM5 0.996 0.030 -0.119 0.905
BFICM5 0.994 0.034 -0.163 0.871
BFIEM5 1.010 0.038 0.251 0.802
BFIAM5 0.988 0.028 -0.433 0.665
BFINM5 0.989 0.032 -0.358 0.721
ANTISOCIAL 1.000 0.010 -0.043 0.966
ALCOHOLISM 0.967 0.022 -1.521 0.128
IQE5 1.003 0.012 0.289 0.772
EXFUNCE5 1.098 0.052 1.895 0.058
TOMTOTE5 0.936 0.045 -1.427 0.154
TOTEXTE5 1.020 0.014 1.399 0.162
INTISOE5 1.056 0.035 1.603 0.109
TOTADDE5 1.064 0.016 3.948 0.000
TOTPROE5 1.000 0.030 0.002 0.998
C#3 ON
SESWQ35 1.000 0.391 -0.001 1.000
P5CACORNCA 0.286 0.244 -2.933 0.003
VNDNGDM5 0.851 0.158 -0.944 0.345
SOCPRBM5 0.907 0.206 -0.453 0.651
NCHILDREN0 1.004 0.002 2.406 0.016
SCHMEALS00 1.029 0.024 1.221 0.222
HARM3EM5RE 1.300 0.631 0.476 0.634
TSIBL5 1.216 0.161 1.341 0.180
TSSUPM5 0.964 0.040 -0.911 0.362
ACTVM5 1.019 0.105 0.181 0.856
NOBIODL5 0.876 0.532 -0.233 0.816
ANYVIOM5 0.704 0.411 -0.720 0.472
WARME5 1.160 0.220 0.727 0.467
FDEPMM5 1.187 0.548 0.342 0.733
BFIOM5 1.092 0.037 2.493 0.013
BFICM5 0.981 0.030 -0.648 0.517
BFIEM5 0.946 0.044 -1.205 0.228
BFIAM5 1.004 0.046 0.094 0.925
BFINM5 1.001 0.054 0.018 0.985
ANTISOCIAL 1.003 0.012 0.258 0.796
ALCOHOLISM 1.008 0.036 0.233 0.816
IQE5 0.986 0.016 -0.869 0.385
EXFUNCE5 0.886 0.046 -2.505 0.012
TOMTOTE5 1.020 0.075 0.271 0.786
TOTEXTE5 1.023 0.020 1.153 0.249
INTISOE5 1.389 0.092 4.238 0.000
TOTADDE5 1.056 0.021 2.643 0.008
TOTPROE5 0.908 0.033 -2.755 0.006
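As a rough cross-check on the table above, the unstandardised multinomial regression estimates that MplusAutomation does parse can be exponentiated to approximate the odds ratio point estimates. This is only a sketch: antecedent.model is a hypothetical object created by running readModels() on the antecedent .out file, the "C#2.ON"/"C#3.ON" paramHeader labels should be checked against the parsed output, and the standard errors and p-values stay on the logit scale here (unlike the Mplus odds ratio table).
# Sketch: approximate the odds ratios by exponentiating the unstandardised estimates.
# antecedent.model is a placeholder for readModels() run on the antecedent .out file.
antecedent.odds.ratios <- antecedent.model$parameters$unstandardized %>%
  filter(str_detect(paramHeader, "^C#[23]\\.ON$")) %>%
  mutate(`Odds ratio` = exp(est)) %>%                  # point estimates only
  select(paramHeader, param, `Odds ratio`, se, pval)   # se and pval are on the logit scale

antecedent.odds.ratios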
We examined the outcome associations in two ways: first with the auxiliary DE3STEP method, then with the more recently developed BCH approach.
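For reference, the DE3STEP option is set in the Mplus input rather than in R, via the AUXILIARY statement. A minimal sketch of how such an input could be generated with MplusAutomation::mplusObject() follows; the variable names, the isolation_data object, and the settings are placeholders and the growth mixture model itself is omitted, so this is illustrative only rather than the input actually used here.
# Illustrative sketch only: generating a DE3STEP outcome model input with MplusAutomation.
# Variable names and isolation_data are placeholders; the GMM specification is omitted.
de3step_input <- mplusObject(
  TITLE = "3-step outcome model (DE3STEP);",
  VARIABLE = "USEVAR = sisoe5 sisoe7 sisoe10 sisoe12;
              CLASSES = c(3);
              AUXILIARY = neete18 (DE3STEP);",
  ANALYSIS = "TYPE = MIXTURE;",
  rdata = isolation_data
)

# mplusModeler() would write the .inp/.dat files and (optionally) run Mplus:
# fit <- mplusModeler(de3step_input, modelout = "DE3STEP_outcomes.inp", run = 1L)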
First we need to read in the output file from Mplus, then extract all the means per variable.
outcome.model <- readModels(target = paste0(mplus_outcome_output_3step), recursive = TRUE)
# extract overall means for each class per variable
<- outcome.model[["X.Users.katiethompson.Documents.PhD.LISS.DTP_Louise_and_Tim.Social.isolation.trajectories_Paper.1.data_analysis.mplus.GMM.clustered.full_sample.3step.output.outcomes..isolation_3traj_full_sample_clustered_3STEP_DE3STEP_outcomes.out"]][["lcCondMeans"]][["overall"]]
outcome.model_overall_means
colnames(outcome.model_overall_means) <- c("Variable",
"Low stable class mean",
"Low stable class standard error",
"Increasing class mean",
"Increasing class standard error",
"Decreasing class mean",
"Decreasing class standard error",
"Chi square",
"df",
"p value")
outcome.model_overall_means
We are not interested in the comparison between classes 2 and 3, only in the comparisons with the low stable class (class 1). Significant results are presented here, split by increasing and decreasing class respectively.
We also ran the analysis using the "DU3STEP" method in Mplus, but the DU3STEP option produced errors such as: "PROBLEMS OCCURRED DURING THE ESTIMATION FOR THE DISTAL OUTCOME NEETE18. THE VARIANCE OF THE DISTAL OUTCOME IN CLASS 1 IS ZERO. THE PROBLEM CAN BE RESOLVED BY USING THE DE3STEP OPTION." The DE3STEP option was therefore used instead.
<- outcome.model[["X.Users.katiethompson.Documents.PhD.LISS.DTP_Louise_and_Tim.Social.isolation.trajectories_Paper.1.data_analysis.mplus.GMM.clustered.full_sample.3step.output.outcomes..isolation_3traj_full_sample_clustered_3STEP_DE3STEP_outcomes.out"]][["lcCondMeans"]][["pairwise"]]
outcome.model_all_pairwise_tests
# Overall table
outcome.model_pairwise_tests <- outcome.model_all_pairwise_tests %>%
  filter(classA == 1) %>% # only get low stable comparisons
  select(!df) %>% # remove df column (it was empty)
  add.significance.variable()
# Increasing comparisons - significant only
outcome.model_pairwise_tests_significant_increasing <- outcome.model_pairwise_tests %>%
  filter(classB == 2 & Significance == "Significant") %>%
  mutate(`Class comparison` = c("Increasing")) %>%
  select(Variable = var,
         `Class comparison`,
         `Chi square` = chisq,
         p)

outcome.model_pairwise_tests_significant_increasing
# Decreasing comparisons - significant only
outcome.model_pairwise_tests_significant_decreasing <- outcome.model_pairwise_tests %>%
  filter(classB == 3 & Significance == "Significant") %>%
  mutate(`Class comparison` = c("Decreasing")) %>%
  select(Variable = var,
         `Class comparison`,
         `Chi square` = chisq,
         p)

outcome.model_pairwise_tests_significant_decreasing
# All significant results
outcome.model_pairwise_tests_significant <- rbind(outcome.model_pairwise_tests_significant_increasing,
                                                  outcome.model_pairwise_tests_significant_decreasing)

#outcome.model_pairwise_tests_significant
Summary of variables that were significant in the pairwise tests using "DE3STEP" in Mplus (Y indicates the same as in the original analysis):
Increasing:
Decreasing:
Both:
Missing but reported in original analysis:
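A sketch of how these summary lists could be generated programmatically from the DE3STEP objects created above, using base R set operations:
# Sketch: derive the summary lists from the significant DE3STEP comparisons
increasing_vars <- outcome.model_pairwise_tests_significant_increasing$Variable
decreasing_vars <- outcome.model_pairwise_tests_significant_decreasing$Variable

list(
  `Increasing only` = setdiff(increasing_vars, decreasing_vars),
  `Decreasing only` = setdiff(decreasing_vars, increasing_vars),
  Both              = intersect(increasing_vars, decreasing_vars)
)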
Similar to the auxiliary DE3STEP approach, we also assessed the outcome associations using the BCH approach:
The BCH method is described in Vermunt (2010) and in Bakk et al. (2013). For a distal outcome model that evaluates the means across classes for a continuous auxiliary variable, simulations show that the BCH method substantially outperforms Lanza's method and the 3-step method. The BCH method avoids the shifts in latent classes in the final stage that the 3-step method is susceptible to: in its final stage, the BCH method uses a weighted multiple group analysis, where the groups correspond to the latent classes, so class shift is not possible because the classes are known. In addition, the BCH method performs well when the variance of the auxiliary variable differs substantially across classes, resolving the problems that Lanza's method is susceptible to. The BCH method uses weights w_ij which reflect the measurement error of the latent class variable. In the estimation of the auxiliary model, the i-th observation in class/group j is assigned weight w_ij, and the auxiliary model is estimated as a multiple group model using these weights.
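To make the weighting idea concrete, here is a toy illustration (not the estimation Mplus performs) of how BCH-style weights can be constructed from posterior class probabilities under modal class assignment; the posterior matrix is made-up data:
# Toy illustration of BCH-style weights (hypothetical posteriors, 4 people x 3 classes)
posterior <- matrix(c(0.90, 0.07, 0.03,
                      0.10, 0.85, 0.05,
                      0.20, 0.10, 0.70,
                      0.05, 0.15, 0.80),
                    ncol = 3, byrow = TRUE)
modal <- max.col(posterior)              # most likely (modal) class per person
assignment <- diag(3)[modal, ]           # hard assignment as 0/1 indicators

# classification error matrix: D[s, t] = P(assigned class = t | true class = s)
D <- t(posterior) %*% assignment
D <- D / rowSums(D)

# BCH weights: person i contributes to class/group j with weight w_ij;
# negative weights are possible and expected with this method
bch_weights <- assignment %*% solve(D)
round(bch_weights, 2)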
No errors were thrown when running this BCH model - all variables were thus included in the analyses.
<- outcome.model[["X.Users.katiethompson.Documents.PhD.LISS.DTP_Louise_and_Tim.Social.isolation.trajectories_Paper.1.data_analysis.mplus.GMM.clustered.full_sample.3step.output.outcomes..isolation_3traj_full_sample_clustered_3STEP_BCH_outcomes.out"]][["lcCondMeans"]][["pairwise"]]
outcome.model_all_pairwise_tests_bch
# Overall table
outcome.model_pairwise_tests_bch <- outcome.model_all_pairwise_tests_bch %>%
  filter(classA == 1) %>% # only get low stable comparisons
  select(!df) %>% # remove df column (it was empty)
  add.significance.variable()
# Increasing comparisons (all comparisons kept here, not just significant ones)
outcome.model_pairwise_tests_increasing_bch <- outcome.model_pairwise_tests_bch %>%
  filter(classB == 2) %>%
  mutate(`Class comparison` = c("Increasing")) %>%
  select(Variable = var,
         `Class comparison`,
         `Chi square` = chisq,
         p)
# Decreasing comparisons (all comparisons kept here, not just significant ones)
outcome.model_pairwise_tests_decreasing_bch <- outcome.model_pairwise_tests_bch %>%
  filter(classB == 3) %>%
  mutate(`Class comparison` = c("Decreasing")) %>%
  select(Variable = var,
         `Class comparison`,
         `Chi square` = chisq,
         p)
# All BCH comparisons combined
outcome.model_pairwise_tests_bch_full <- rbind(outcome.model_pairwise_tests_increasing_bch,
                                               outcome.model_pairwise_tests_decreasing_bch)
kable(outcome.model_pairwise_tests_bch_full) # compare to the auxiliary DE3STEP results
Variable | Class comparison | Chi square | p |
---|---|---|---|
DXMDEE18 | Increasing | 1.236 | 0.266 |
DXGADE18 | Increasing | 0.925 | 0.336 |
DXADHD5_18E | Increasing | 4.424 | 0.035 |
CDMODE18 | Increasing | 7.823 | 0.005 |
DXALCDEPE18 | Increasing | 0.666 | 0.415 |
DXMARJE18 | Increasing | 3.949 | 0.047 |
DXPTSD5LFE18 | Increasing | 1.162 | 0.281 |
DXPTSD5CUE18 | Increasing | 0.436 | 0.509 |
PSYEXPCE18 | Increasing | 8.742 | 0.003 |
SHARMSUICE18 | Increasing | 4.666 | 0.031 |
SRVUSEMHE18 | Increasing | 3.433 | 0.064 |
NEETE18 | Increasing | 9.257 | 0.002 |
MDESXE18 | Increasing | 2.696 | 0.101 |
GADSXE18 | Increasing | 0.256 | 0.613 |
SR_SYMTOT18E | Increasing | 29.268 | 0.000 |
CDSXE18 | Increasing | 12.461 | 0.000 |
ALCSXE18 | Increasing | 0.670 | 0.413 |
MARJSXE18 | Increasing | 3.623 | 0.057 |
PSYSYMPE18 | Increasing | 3.513 | 0.061 |
BMIE18 | Increasing | 2.624 | 0.105 |
LNCRP_E18_4SD | Increasing | 0.188 | 0.665 |
PHYACTE18 | Increasing | 5.459 | 0.019 |
SMKCNUME18 | Increasing | 7.143 | 0.008 |
LONELYE18 | Increasing | 8.615 | 0.003 |
LIFSATE18 | Increasing | 9.274 | 0.002 |
TECHE18 | Increasing | 5.280 | 0.022 |
COPSTRSE18 | Increasing | 12.503 | 0.000 |
PSQIE18 | Increasing | 5.573 | 0.018 |
JPREPSE18 | Increasing | 0.005 | 0.942 |
JPREPAE18 | Increasing | 7.964 | 0.005 |
OPTIME18 | Increasing | 33.637 | 0.000 |
JBSCHACTE18 | Increasing | 0.296 | 0.587 |
DXMDEE18 | Decreasing | 3.020 | 0.082 |
DXGADE18 | Decreasing | 0.056 | 0.812 |
DXADHD5_18E | Decreasing | 2.625 | 0.105 |
CDMODE18 | Decreasing | 1.349 | 0.245 |
DXALCDEPE18 | Decreasing | 0.162 | 0.687 |
DXMARJE18 | Decreasing | 3.112 | 0.078 |
DXPTSD5LFE18 | Decreasing | 0.174 | 0.677 |
DXPTSD5CUE18 | Decreasing | 0.047 | 0.828 |
PSYEXPCE18 | Decreasing | 8.017 | 0.005 |
SHARMSUICE18 | Decreasing | 2.964 | 0.085 |
SRVUSEMHE18 | Decreasing | 0.000 | 0.985 |
NEETE18 | Decreasing | 3.585 | 0.058 |
MDESXE18 | Decreasing | 5.340 | 0.021 |
GADSXE18 | Decreasing | 0.131 | 0.717 |
SR_SYMTOT18E | Decreasing | 0.007 | 0.934 |
CDSXE18 | Decreasing | 1.194 | 0.274 |
ALCSXE18 | Decreasing | 0.103 | 0.749 |
MARJSXE18 | Decreasing | 4.476 | 0.034 |
PSYSYMPE18 | Decreasing | 0.102 | 0.749 |
BMIE18 | Decreasing | 0.119 | 0.731 |
LNCRP_E18_4SD | Decreasing | 1.198 | 0.274 |
PHYACTE18 | Decreasing | 16.671 | 0.000 |
SMKCNUME18 | Decreasing | 6.196 | 0.013 |
LONELYE18 | Decreasing | 3.002 | 0.083 |
LIFSATE18 | Decreasing | 4.679 | 0.031 |
TECHE18 | Decreasing | 2.797 | 0.094 |
COPSTRSE18 | Decreasing | 0.936 | 0.333 |
PSQIE18 | Decreasing | 0.391 | 0.532 |
JPREPSE18 | Decreasing | 0.124 | 0.725 |
JPREPAE18 | Decreasing | 1.281 | 0.258 |
OPTIME18 | Decreasing | 9.899 | 0.002 |
JBSCHACTE18 | Decreasing | 0.216 | 0.642 |
# kable(outcome.model_pairwise_tests_significant)
Summary of variables that were significant in the pairwise tests using "BCH" in Mplus (Y indicates the same as in the original analysis):
Increasing:
Decreasing:
Both:
Missing here but included in original:
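The same summary can be derived from the combined BCH table by filtering on the reported p-values; dplyr::summarise is called explicitly because of the plyr masking noted at the top of the page:
# Sketch: label each significant BCH variable by the comparison(s) in which it was significant
outcome.model_pairwise_tests_bch_full %>%
  filter(p < 0.05) %>%
  group_by(Variable) %>%
  dplyr::summarise(`Significant comparison(s)` =
                     paste(sort(`Class comparison`), collapse = " & "))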
Work by Katherine N Thompson
katherine.n.thompson@kcl.ac.uk