This example uses the immortal Holzinger Swineford data set.
library(OpenMx)
data(HS.ability.data)
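If the variable names are unfamiliar, a quick look at the data frame helps (base R; output omitted here):

str(HS.ability.data)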
HS.ability.data$ageym <- HS.ability.data$agey*12 + HS.ability.data$agem
HS.ability.data$male <- as.numeric(HS.ability.data$Gender == 'Male')
The OpenMx model looks like this:
# Specify variables
indicators <- c('visual','cubes','paper','flags','paperrev','flagssub',
                'general','paragrap','sentence','wordc','wordm')
covariates <- c("male","ageym","grade")
latents <- c("g", covariates)
# Build the model
mimicModel <- mxModel(
"MIMIC", type="RAM",
manifestVars = indicators, latentVars = latents,
# Set up exogenous predictors
mxPath("one", covariates, labels=paste0('data.',covariates), free=FALSE),
# Fix factor variance
mxPath('g', arrows=2, free=FALSE, values=1),
# Error variances:
mxPath(from=c(indicators), arrows=2, free=TRUE, values=10),
# Means (saturated means model):
mxPath(from="one", to=indicators, values=rep(5, length(indicators))),
# Loadings:
mxPath(from="g", to=indicators, values=.5),
# Covariate paths
mxPath(covariates, "g", labels=covariates),
# Data
mxData(observed = HS.ability.data, type = "raw"))
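Before fitting, it can be worth a sanity check that the paths were wired as intended. One option (a sketch; omxGraphviz ships with OpenMx and writes a Graphviz .dot file, here under an arbitrary file name) is:

omxGraphviz(mimicModel, dotFilename = "mimic.dot")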
# Get some good starting values for regularization. This
# saves 2-3 minutes on my laptop.
mimicModel <- mxRun(mimicModel)
## Running MIMIC with 36 parameters
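The ML estimates of the covariate paths from this run are reused below as the adaptive-lasso scale; you can inspect them directly with the same expression the penalty uses:

coef(mimicModel)[covariates]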
Add the penalty:
mimicModel <- mxModel(
  mimicModel,
  mxMatrix('Full',1,1,free=TRUE,values=0,labels="lambda",name="hparam"),
# Set scale to ML estimates for adaptive lasso
mxPenaltyLASSO(what=covariates, name="LASSO",
scale = coef(mimicModel)[covariates],
lambda = 0, lambda.max =2, lambda.step=.04)
)
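Given lambda = 0, lambda.max = 2, and lambda.step = .04, the search below refits the model along a grid of 51 penalty values; a quick way to confirm the grid size:

length(seq(0, 2, by = .04))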
Run the regularization. With only three covariates, the plot of results is not very exciting. We learn that sex is not a good predictor of this factor.
regMIMIC <- mxPenaltySearch(mimicModel)
## Running MIMIC with 37 parameters
## Warning: In model 'MIMIC' Optimizer returned a non-zero status code 6. The
## model does not satisfy the first-order optimality conditions to the required
## accuracy, and no improved point for the merit function could be found during
## the final linesearch (Mx status RED)
detail <- regMIMIC$compute$steps$PS$output$detail
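Given the status RED warning above, it may be worth checking how many of the penalized fits converged cleanly. A sketch, assuming the detail data frame carries a statusCode column (recent OpenMx versions record one per fit):

table(detail$statusCode)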
library(reshape2)
library(ggplot2)
est <- detail[,c(covariates, 'lambda')]
ggplot(melt(est, id.vars = 'lambda')) +
geom_line(aes(x=lambda, y=value, color=variable)) +
geom_vline(aes(xintercept=coef(regMIMIC)['lambda']),
linetype="dashed", alpha=.5)
The regularized estimates of the covariate paths, at the EBIC-selected penalty, can be found here:
detail[detail$EBIC == min(detail$EBIC), covariates]
## male ageym grade
## 27 3.590518e-07 -0.02815178 1.0634
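The penalty strength that the EBIC selects can be read from the same data frame:

detail$lambda[which.min(detail$EBIC)]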
The regularization causes a lot of bias. One way to deal with this is to fix the zeroed parameters to zero, discard the regularization penalty, and re-fit the model.
regMIMIC <- mxPenaltyZap(regMIMIC)
## Zapping 'male'
## Fixing 'lambda'
## Tip: Use
## model = mxRun(model)
## to re-estimate the model without any penalty terms.
regMIMIC <- mxRun(regMIMIC)
## Running MIMIC with 35 parameters
summary(regMIMIC)
## Summary of MIMIC
##
## free parameters:
## name matrix row col Estimate Std.Error A
## 1 MIMIC.A[1,12] A visual g 2.61817480 0.36295833
## 2 MIMIC.A[2,12] A cubes g 0.93494040 0.25687732
## 3 MIMIC.A[3,12] A paper g 0.70084630 0.14928785
## 4 MIMIC.A[4,12] A flags g 1.57351103 0.49761089
## 5 MIMIC.A[5,12] A paperrev g 0.99010293 0.24583101
## 6 MIMIC.A[6,12] A flagssub g 3.33349091 0.64431006
## 7 MIMIC.A[7,12] A general g 9.23700974 0.54046894
## 8 MIMIC.A[8,12] A paragrap g 2.53491904 0.15813300
## 9 MIMIC.A[9,12] A sentence g 3.96091992 0.22542876
## 10 MIMIC.A[10,12] A wordc g 3.80400302 0.26189262
## 11 MIMIC.A[11,12] A wordm g 5.73048093 0.34264720
## 12 ageym A g ageym -0.02870564 0.00629466
## 13 grade A g grade 1.08144512 0.23292638
## 14 MIMIC.S[1,1] S visual visual 40.27115654 3.35122006
## 15 MIMIC.S[2,2] S cubes cubes 21.00803433 1.72260073
## 16 MIMIC.S[3,3] S paper paper 7.36560842 0.60589608
## 17 MIMIC.S[4,4] S flags flags 78.47387915 6.42387435 !
## 18 MIMIC.S[5,5] S paperrev paperrev 8.35234683 0.99382090
## 19 MIMIC.S[6,6] S flagssub flagssub 56.56097128 6.79062885 !
## 20 MIMIC.S[7,7] S general general 45.64540523 4.81969774
## 21 MIMIC.S[8,8] S paragrap paragrap 4.06597862 0.40155971
## 22 MIMIC.S[9,9] S sentence sentence 6.80450945 0.74741654
## 23 MIMIC.S[10,10] S wordc wordc 13.88562716 1.28467866
## 24 MIMIC.S[11,11] S wordm wordm 17.27856604 1.79625819 !
## 25 MIMIC.M[1,1] M 1 visual 21.29329316 5.64113756
## 26 MIMIC.M[1,2] M 1 cubes 21.38064737 1.99837351
## 27 MIMIC.M[1,3] M 1 paper 12.00174126 1.55603693
## 28 MIMIC.M[1,4] M 1 flags 13.00224262 3.37368094
## 29 MIMIC.M[1,5] M 1 paperrev 12.12978396 2.18056880
## 30 MIMIC.M[1,6] M 1 flagssub 24.45734651 7.26492062
## 31 MIMIC.M[1,7] M 1 general 11.26667232 19.79284450
## 32 MIMIC.M[1,8] M 1 paragrap 1.12601089 5.38590221
## 33 MIMIC.M[1,9] M 1 sentence 4.77316589 8.44016604
## 34 MIMIC.M[1,10] M 1 wordc 14.03601230 8.10045680
## 35 MIMIC.M[1,11] M 1 wordm -2.91414063 12.19544828
##
## Model Statistics:
## | Parameters | Degrees of Freedom | Fit (-2lnL units)
## Model: 35 2964 17843.68
## Saturated: 77 2922 NA
## Independence: 22 2977 NA
## Number of observations/statistics: 301/2999
##
## Information Criteria:
## | df Penalty | Parameters Penalty | Sample-Size Adjusted
## AIC: 11915.6773 17913.68 17923.19
## BIC: 927.8025 18043.43 17932.43
## To get additional fit indices, see help(mxRefModels)
## timestamp: 2025-05-19 10:51:21
## Wall clock time: 3.005078 secs
## optimizer: SLSQP
## OpenMx version number: 2.22.6
## Need help? See help(mxSummary)
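If you want likelihood-based confidence intervals for the surviving covariate effects, one option is OpenMx's mxCI machinery (a sketch, not part of the original analysis; 'ageym' and 'grade' are the free parameter labels shown in the summary above):

ciModel <- mxModel(regMIMIC, mxCI(c('ageym', 'grade')))
ciModel <- mxRun(ciModel, intervals = TRUE)
summary(ciModel)$CI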