Load relevant packages
library(dplyr)
## Warning: package 'dplyr' was built under R version 4.2.3
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(tidyverse)
## Warning: package 'tidyverse' was built under R version 4.2.3
## Warning: package 'ggplot2' was built under R version 4.2.3
## Warning: package 'tibble' was built under R version 4.2.3
## Warning: package 'tidyr' was built under R version 4.2.3
## Warning: package 'readr' was built under R version 4.2.3
## Warning: package 'purrr' was built under R version 4.2.3
## Warning: package 'stringr' was built under R version 4.2.3
## Warning: package 'forcats' was built under R version 4.2.3
## Warning: package 'lubridate' was built under R version 4.2.3
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats 1.0.0 ✔ readr 2.1.4
## ✔ ggplot2 3.4.3 ✔ stringr 1.5.0
## ✔ lubridate 1.9.3 ✔ tibble 3.2.1
## ✔ purrr 1.0.2 ✔ tidyr 1.3.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag() masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(lavaan)
## Warning: package 'lavaan' was built under R version 4.2.3
## This is lavaan 0.6-16
## lavaan is FREE software! Please report any bugs.
library(remotes)
## Warning: package 'remotes' was built under R version 4.2.3
library(foreign)
## Warning: package 'foreign' was built under R version 4.2.3
library(semPlot)
## Warning: package 'semPlot' was built under R version 4.2.3
Import data
#Data import#
imported_data <- read.csv("C:/Users/alfre/Documents/Master´s Thesis/Data/All Countries Complete corrected electricity data wide imputed.csv", header = TRUE, sep = ";")
data <- imported_data
Wrangling/renaming
#Exclude participants from Austria, Italy & Turkey
data <- data %>%
filter(Country <= 3)
data <- data %>%
mutate(initial_survey_age = gsub(",", ".", initial_survey_age),
initial_survey_age = as.numeric(initial_survey_age))
data <- data %>%
mutate(inital_survey_age = round(initial_survey_age))
data <- data %>%
mutate(initial_survey_socialstatus = gsub(",", ".", initial_survey_socialstatus),
initial_survey_socialstatus = as.numeric(initial_survey_socialstatus))
data <- data %>%
mutate(initial_survey_socialstatus = round(initial_survey_socialstatus))
data <- data %>%
mutate(T1_attitude = gsub(",", ".", T1_attitude),
T1_attitude = as.numeric(T1_attitude))
data <- data %>%
mutate(T1_attitude = round(T1_attitude))
data <- data %>%
mutate(T2_attitude = gsub(",", ".", T2_attitude),
T2_attitude = as.numeric(T2_attitude))
data <- data %>%
mutate(T2_attitude = round(T2_attitude))
data <- data %>%
mutate(T3_attitude = gsub(",", ".", T3_attitude),
T3_attitude = as.numeric(T3_attitude))
data <- data %>%
mutate(T3_attitude = round(T3_attitude))
data <- data %>%
mutate(T4_attitude = gsub(",", ".", T4_attitude),
T4_attitude = as.numeric(T4_attitude))
data <- data %>%
mutate(T4_attitude = round(T4_attitude))
data <- data %>%
mutate(T5_attitude = gsub(",", ".", T5_attitude),
T5_attitude = as.numeric(T5_attitude))
data <- data %>%
mutate(T5_attitude = round(T5_attitude))
data <- data %>%
mutate(T1_Inorms = gsub(",", ".", T1_Inorms),
T1_Inorms = as.numeric(T1_Inorms))
data <- data %>%
mutate(T1_Inorms = round(T1_Inorms))
data <- data %>%
mutate(T2_Inorms = gsub(",", ".", T2_Inorms),
T2_Inorms = as.numeric(T2_Inorms))
data <- data %>%
mutate(T2_Inorms = round(T2_Inorms))
data <- data %>%
mutate(T3_Inorms = gsub(",", ".", T3_Inorms),
T3_Inorms = as.numeric(T3_Inorms))
data <- data %>%
mutate(T3_Inorms = round(T3_Inorms))
data <- data %>%
mutate(T4_Inorms = gsub(",", ".", T4_Inorms),
T4_Inorms = as.numeric(T4_Inorms))
data <- data %>%
mutate(T4_Inorms = round(T4_Inorms))
data <- data %>%
mutate(T5_Inorms = gsub(",", ".", T5_Inorms),
T5_Inorms = as.numeric(T5_Inorms))
data <- data %>%
mutate(T5_Inorms = round(T5_Inorms))
data <- data %>%
mutate(T1_dnorms = gsub(",", ".", T1_dnorms),
T1_dnorms = as.numeric(T1_dnorms))
data <- data %>%
mutate(T1_dnorms = round(T1_dnorms))
data <- data %>%
mutate(T2_dnorms = gsub(",", ".", T2_dnorms),
T2_dnorms = as.numeric(T2_dnorms))
data <- data %>%
mutate(T2_dnorms = round(T2_dnorms))
data <- data %>%
mutate(T3_dnorms = gsub(",", ".", T3_dnorms),
T3_dnorms = as.numeric(T3_dnorms))
data <- data %>%
mutate(T3_dnorms = round(T3_dnorms))
data <- data %>%
mutate(T4_dnorms = gsub(",", ".", T4_dnorms),
T4_dnorms = as.numeric(T4_dnorms))
data <- data %>%
mutate(T4_dnorms = round(T4_dnorms))
data <- data %>%
mutate(T5_dnorms = gsub(",", ".", T5_dnorms),
T5_dnorms = as.numeric(T5_dnorms))
data <- data %>%
mutate(T5_dnorms = round(T5_dnorms))
data <- data %>%
mutate(T1_pbc1 = gsub(",", ".", T1_pbc1),
T1_pbc1 = as.numeric(T1_pbc1))
data <- data %>%
mutate(T1_pbc1 = round(T1_pbc1))
data <- data %>%
mutate(T2_pbc1 = gsub(",", ".", T2_pbc1),
T2_pbc1 = as.numeric(T2_pbc1))
data <- data %>%
mutate(T2_pbc1 = round(T2_pbc1))
data <- data %>%
mutate(T3_pbc1 = gsub(",", ".", T3_pbc1),
T3_pbc1 = as.numeric(T3_pbc1))
data <- data %>%
mutate(T3_pbc1 = round(T3_pbc1))
data <- data %>%
mutate(T4_pbc1 = gsub(",", ".", T4_pbc1),
T4_pbc1 = as.numeric(T4_pbc1))
data <- data %>%
mutate(T4_pbc1 = round(T4_pbc1))
data <- data %>%
mutate(T5_pbc1 = gsub(",", ".", T5_pbc1),
T5_pbc1 = as.numeric(T5_pbc1))
data <- data %>%
mutate(T5_pbc1 = round(T5_pbc1))
data <- data %>%
mutate(T1_pbc2 = gsub(",", ".", T1_pbc2),
T1_pbc2 = as.numeric(T1_pbc2))
data <- data %>%
mutate(T1_pbc2 = round(T1_pbc2))
data <- data %>%
mutate(T2_pbc2 = gsub(",", ".", T2_pbc2),
T2_pbc2 = as.numeric(T2_pbc2))
data <- data %>%
mutate(T2_pbc2 = round(T2_pbc2))
data <- data %>%
mutate(T3_pbc2 = gsub(",", ".", T3_pbc2),
T3_pbc2 = as.numeric(T3_pbc2))
data <- data %>%
mutate(T3_pbc2 = round(T3_pbc2))
data <- data %>%
mutate(T4_pbc2 = gsub(",", ".", T4_pbc2),
T4_pbc2 = as.numeric(T4_pbc2))
data <- data %>%
mutate(T4_pbc2 = round(T4_pbc2))
data <- data %>%
mutate(T5_pbc2 = gsub(",", ".", T5_pbc2),
T5_pbc2 = as.numeric(T5_pbc2))
data <- data %>%
mutate(T5_pbc2 = round(T5_pbc2))
data <- data %>%
mutate(T1_intention = gsub(",", ".", T1_intention),
T1_intention = as.numeric(T1_intention))
data <- data %>%
mutate(T1_intention = round(T1_intention))
data <- data %>%
mutate(T2_intention = gsub(",", ".", T2_intention),
T2_intention = as.numeric(T2_intention))
data <- data %>%
mutate(T2_intention = round(T2_intention))
data <- data %>%
mutate(T3_intention = gsub(",", ".", T3_intention),
T3_intention = as.numeric(T3_intention))
data <- data %>%
mutate(T3_intention = round(T3_intention))
data <- data %>%
mutate(T4_intention = gsub(",", ".", T4_intention),
T4_intention = as.numeric(T4_intention))
data <- data %>%
mutate(T4_intention = round(T4_intention))
data <- data %>%
mutate(T5_intention = gsub(",", ".", T5_intention),
T5_intention = as.numeric(T5_intention))
data <- data %>%
mutate(T5_intention = round(T5_intention))
data <- data %>%
mutate(W1_comsumption_pp_7days = gsub(",", ".", W1_comsumption_pp_7days),
W1_comsumption_pp_7days = as.numeric(W1_comsumption_pp_7days))
data <- data %>%
mutate(W2_comsumption_pp_7days = gsub(",", ".", W2_comsumption_pp_7days),
W2_comsumption_pp_7days = as.numeric(W2_comsumption_pp_7days))
data <- data %>%
mutate(W3_comsumption_pp_7days = gsub(",", ".", W3_comsumption_pp_7days),
W3_comsumption_pp_7days = as.numeric(W3_comsumption_pp_7days))
data <- data %>%
mutate(W4_comsumption_pp_7days = gsub(",", ".", W4_comsumption_pp_7days),
W4_comsumption_pp_7days = as.numeric(W4_comsumption_pp_7days))
data <- data %>%
mutate(W5_comsumption_pp_7days = gsub(",", ".", W5_comsumption_pp_7days),
W5_comsumption_pp_7days = as.numeric(W5_comsumption_pp_7days))
# Renaming variables
data <- data %>%
rename(v11 = T1_attitude,
v12 = T2_attitude,
v13 = T3_attitude,
v14 = T4_attitude,
v15 = T5_attitude)
data <- data %>%
rename(v21 = T1_Inorms,
v22 = T2_Inorms,
v23 = T3_Inorms,
v24 = T4_Inorms,
v25 = T5_Inorms)
data <- data %>%
rename(v31 = T1_dnorms,
v32 = T2_dnorms,
v33 = T3_dnorms,
v34 = T4_dnorms,
v35 = T5_dnorms)
data <- data %>%
rename(v41 = T1_pbc1,
v42 = T2_pbc1,
v43 = T3_pbc1,
v44 = T4_pbc1,
v45 = T5_pbc1)
data <- data %>%
rename(v51 = T1_pbc2,
v52 = T2_pbc2,
v53 = T3_pbc2,
v54 = T4_pbc2,
v55 = T5_pbc2)
data <- data %>%
rename(v61 = T1_intention,
v62 = T2_intention,
v63 = T3_intention,
v64 = T4_intention,
v65 = T5_intention)
data <- data %>%
rename(v71 = W1_comsumption_pp_7days,
v72 = W2_comsumption_pp_7days,
v73 = W3_comsumption_pp_7days,
v74 = W4_comsumption_pp_7days,
v75 = W5_comsumption_pp_7days)
Removing negative imputed values from v7
variances_v7 <- apply(data[, c("v71", "v72", "v73", "v74", "v75")], 2, var)
variances_v7
## v71 v72 v73 v74 v75
## 10475.916 9251.266 9267.434 8115.179 7543.790
variances_v6 <- apply(data[, c("v61", "v62", "v63", "v64", "v65")], 2, var)
variances_v6
## v61 v62 v63 v64 v65
## 0.8348481 0.7412842 0.7085184 0.7355892 0.6885547
#Assessing the effect of imputation on v71-v75#####
num_negative_v71 <- sum(data$v71 < 0)
num_negative_v72 <- sum(data$v72 < 0)
num_negative_v73 <- sum(data$v73 < 0)
num_negative_v74 <- sum(data$v74 < 0)
num_negative_v75 <- sum(data$v75 < 0)
#Removing negative values from v71-v75#
data_no_negative_v7 <- data
# For v71
no_negative_v71 <- data_no_negative_v7$v71 >= 0
data_no_negative_v7 <- data_no_negative_v7[no_negative_v71, ]
# For v72
no_negative_v72 <- data_no_negative_v7$v72 >= 0
data_no_negative_v7 <- data_no_negative_v7[no_negative_v72, ]
# For v73
no_negative_v73 <- data_no_negative_v7$v73 >= 0
data_no_negative_v7 <- data_no_negative_v7[no_negative_v73, ]
# For v71
no_negative_v74 <- data_no_negative_v7$v74 >= 0
data_no_negative_v7 <- data_no_negative_v7[no_negative_v74, ]
# For v71
no_negative_v75 <- data_no_negative_v7$v75 >= 0
data_no_negative_v7 <- data_no_negative_v7[no_negative_v75, ]
Standardizing no negative v7 data (NONEG)
#Standardising no negative v7-dataset#
variables_to_standardize <- c("v11", "v21", "v31", "v41", "v51", "v21", "v22", "v23", "v24", "v25", "v31", "v32", "v33", "v34", "v35", "v41", "v42", "v43", "v44", "v45", "v51", "v52", "v53", "v54", "v55", "v61", "v62", "v63", "v64", "v65", "v71", "v72", "v73", "v74", "v75")
noneg_data_standardized <- data_no_negative_v7
noneg_data_standardized <- noneg_data_standardized %>%
mutate(across(all_of(variables_to_standardize), scale))
Demographics
#Demographics
noneg_data_standardized <- noneg_data_standardized %>%
mutate(initial_survey_hhsize = as.numeric(initial_survey_hhsize))
noneg_data_standardized <- noneg_data_standardized %>%
mutate(initial_survey_energypoverty = gsub(",", ".", initial_survey_energypoverty),
initial_survey_energypoverty = as.numeric(initial_survey_energypoverty))
noneg_data_standardized <- noneg_data_standardized %>%
mutate(initial_survey_energypoverty = round(initial_survey_energypoverty))
noneg_data_standardized <- noneg_data_standardized %>%
mutate(initial_survey_energypovpercent = gsub(",", ".", initial_survey_energypovpercent),
initial_survey_energypovpercent = as.numeric(initial_survey_energypovpercent))
noneg_data_standardized <- noneg_data_standardized %>%
mutate(initial_survey_energypovpercent = round(initial_survey_energypovpercent))
noneg_data_standardized <- noneg_data_standardized %>%
group_by(id) %>%
mutate(energypoverty_MEAN = mean(c(initial_survey_energypoverty, initial_survey_energypovpercent)))
Shapiro-Wilks test
selected_columns <- noneg_data_standardized[, c("v11", "v21", "v31", "v41", "v51", "v21", "v22", "v23", "v24", "v25", "v31", "v32", "v33", "v34", "v35", "v41", "v42", "v43", "v44", "v45", "v51", "v52", "v53", "v54", "v55", "v61", "v62", "v63", "v64", "v65", "v71", "v72", "v73", "v74", "v75")]
normality_tests <- lapply(selected_columns, shapiro.test)
names(normality_tests) <- c("v11", "v21", "v31", "v41", "v51", "v21", "v22", "v23", "v24", "v25", "v31", "v32", "v33", "v34", "v35", "v41", "v42", "v43", "v44", "v45", "v51", "v52", "v53", "v54", "v55", "v61", "v62", "v63", "v64", "v65", "v71", "v72", "v73", "v74", "v75")
normality_tests
## $v11
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86286, p-value < 2.2e-16
##
##
## $v21
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87353, p-value < 2.2e-16
##
##
## $v31
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84644, p-value < 2.2e-16
##
##
## $v41
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87011, p-value < 2.2e-16
##
##
## $v51
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.89559, p-value < 2.2e-16
##
##
## $v21
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87353, p-value < 2.2e-16
##
##
## $v22
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84992, p-value < 2.2e-16
##
##
## $v23
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86538, p-value < 2.2e-16
##
##
## $v24
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.8472, p-value < 2.2e-16
##
##
## $v25
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84331, p-value < 2.2e-16
##
##
## $v31
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84644, p-value < 2.2e-16
##
##
## $v32
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.83406, p-value < 2.2e-16
##
##
## $v33
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.83349, p-value < 2.2e-16
##
##
## $v34
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.83378, p-value < 2.2e-16
##
##
## $v35
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.82355, p-value < 2.2e-16
##
##
## $v41
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87011, p-value < 2.2e-16
##
##
## $v42
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.85783, p-value < 2.2e-16
##
##
## $v43
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.85917, p-value < 2.2e-16
##
##
## $v44
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.8573, p-value < 2.2e-16
##
##
## $v45
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86033, p-value < 2.2e-16
##
##
## $v51
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.89559, p-value < 2.2e-16
##
##
## $v52
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86917, p-value < 2.2e-16
##
##
## $v53
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87857, p-value < 2.2e-16
##
##
## $v54
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.88186, p-value < 2.2e-16
##
##
## $v55
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.88024, p-value < 2.2e-16
##
##
## $v61
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.87105, p-value < 2.2e-16
##
##
## $v62
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.85346, p-value < 2.2e-16
##
##
## $v63
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.85035, p-value < 2.2e-16
##
##
## $v64
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86823, p-value < 2.2e-16
##
##
## $v65
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.86115, p-value < 2.2e-16
##
##
## $v71
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84098, p-value < 2.2e-16
##
##
## $v72
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.83168, p-value < 2.2e-16
##
##
## $v73
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.81724, p-value < 2.2e-16
##
##
## $v74
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.83624, p-value < 2.2e-16
##
##
## $v75
##
## Shapiro-Wilk normality test
##
## data: X[[i]]
## W = 0.84444, p-value < 2.2e-16
Initial SEM
sem <- 'Att =~ v11 + v12 + v13 + v14 + v15
Sub =~ v21 + v22 + v23 + v24 + v25
+ v31 + v32 + v33 + v34 + v35
PBC =~ v41 + v42 + v43 + v44 + v45
+ v51 + v52 + v53 + v54 + v55
Int =~ v61 + v62 + v63 + v64 + v65
kWh =~ v71 + v72 + v73 + v74 + v75
Int ~ Att + Sub + PBC
kWh ~ Int + PBC + energypoverty_MEAN + initial_survey_age + initial_survey_hhsize + initial_survey_gender
Att ~~ Sub
Sub ~~ PBC
PBC ~~ Att'
fit <- sem(sem, data=noneg_data_standardized)
summary(fit, fit.measures=TRUE, standardized=TRUE, rsquare=TRUE, modindices=TRUE)
## lavaan 0.6.16 ended normally after 84 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 82
##
## Used Total
## Number of observations 2352 2363
##
## Model Test User Model:
##
## Test statistic 12483.550
## Degrees of freedom 688
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 66738.515
## Degrees of freedom 735
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.821
## Tucker-Lewis Index (TLI) 0.809
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -88095.288
## Loglikelihood unrestricted model (H1) -81853.513
##
## Akaike (AIC) 176354.576
## Bayesian (BIC) 176827.144
## Sample-size adjusted Bayesian (SABIC) 176566.613
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.085
## 90 Percent confidence interval - lower 0.084
## 90 Percent confidence interval - upper 0.087
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.082
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Structured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Att =~
## v11 1.000 0.656 0.655
## v12 0.972 0.030 32.180 0.000 0.637 0.776
## v13 1.042 0.032 33.011 0.000 0.683 0.802
## v14 1.089 0.032 33.879 0.000 0.714 0.830
## v15 1.058 0.031 33.699 0.000 0.693 0.824
## Sub =~
## v21 1.000 0.648 0.647
## v22 1.128 0.037 30.583 0.000 0.731 0.730
## v23 1.148 0.037 31.030 0.000 0.743 0.743
## v24 1.177 0.037 31.672 0.000 0.762 0.763
## v25 1.124 0.037 30.505 0.000 0.728 0.728
## v31 0.826 0.035 23.460 0.000 0.535 0.535
## v32 0.994 0.036 27.533 0.000 0.644 0.643
## v33 1.084 0.037 29.639 0.000 0.702 0.703
## v34 1.085 0.037 29.631 0.000 0.703 0.702
## v35 1.075 0.037 29.414 0.000 0.696 0.696
## PBC =~
## v41 1.000 0.516 0.516
## v42 1.292 0.056 22.916 0.000 0.667 0.667
## v43 1.292 0.056 22.916 0.000 0.667 0.667
## v44 1.345 0.057 23.412 0.000 0.694 0.693
## v45 1.293 0.056 22.936 0.000 0.667 0.668
## v51 1.162 0.054 21.554 0.000 0.600 0.600
## v52 1.303 0.057 23.008 0.000 0.672 0.672
## v53 1.388 0.058 23.808 0.000 0.716 0.716
## v54 1.378 0.058 23.722 0.000 0.711 0.711
## v55 1.365 0.058 23.598 0.000 0.704 0.704
## Int =~
## v61 1.000 0.513 0.512
## v62 1.295 0.058 22.229 0.000 0.664 0.664
## v63 1.452 0.062 23.553 0.000 0.744 0.744
## v64 1.521 0.063 24.057 0.000 0.780 0.779
## v65 1.440 0.061 23.471 0.000 0.738 0.738
## kWh =~
## v71 1.000 0.960 0.961
## v72 1.009 0.008 126.300 0.000 0.969 0.971
## v73 1.009 0.008 125.252 0.000 0.968 0.970
## v74 1.003 0.008 120.789 0.000 0.963 0.964
## v75 0.998 0.009 116.962 0.000 0.958 0.960
##
## Regressions:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Int ~
## Att 0.184 0.019 9.756 0.000 0.235 0.235
## Sub 0.294 0.024 12.402 0.000 0.372 0.372
## PBC 0.341 0.032 10.784 0.000 0.343 0.343
## kWh ~
## Int -0.341 0.072 -4.758 0.000 -0.182 -0.182
## PBC 0.069 0.069 0.996 0.319 0.037 0.037
## enrgypvrt_MEAN 0.083 0.021 3.864 0.000 0.086 0.079
## initial_srvy_g -0.009 0.002 -5.780 0.000 -0.009 -0.122
## intl_srvy_hhsz -0.095 0.016 -5.890 0.000 -0.099 -0.124
## intl_srvy_gndr -0.010 0.039 -0.249 0.803 -0.010 -0.005
##
## Covariances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Att ~~
## Sub 0.235 0.014 17.342 0.000 0.554 0.554
## Sub ~~
## PBC 0.234 0.014 17.269 0.000 0.700 0.700
## Att ~~
## PBC 0.202 0.012 16.311 0.000 0.597 0.597
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .v11 0.571 0.018 31.425 0.000 0.571 0.570
## .v12 0.268 0.009 28.462 0.000 0.268 0.398
## .v13 0.259 0.009 27.340 0.000 0.259 0.357
## .v14 0.231 0.009 25.730 0.000 0.231 0.312
## .v15 0.228 0.009 26.113 0.000 0.228 0.321
## .v21 0.583 0.018 32.015 0.000 0.583 0.582
## .v22 0.467 0.015 30.668 0.000 0.467 0.467
## .v23 0.447 0.015 30.370 0.000 0.447 0.447
## .v24 0.418 0.014 29.873 0.000 0.418 0.418
## .v25 0.470 0.015 30.717 0.000 0.470 0.470
## .v31 0.713 0.022 33.023 0.000 0.713 0.713
## .v32 0.586 0.018 32.056 0.000 0.586 0.586
## .v33 0.505 0.016 31.200 0.000 0.505 0.506
## .v34 0.507 0.016 31.204 0.000 0.507 0.507
## .v35 0.515 0.016 31.311 0.000 0.515 0.515
## .v41 0.734 0.022 32.980 0.000 0.734 0.734
## .v42 0.556 0.018 31.391 0.000 0.556 0.556
## .v43 0.556 0.018 31.391 0.000 0.556 0.556
## .v44 0.520 0.017 30.929 0.000 0.520 0.519
## .v45 0.553 0.018 31.374 0.000 0.553 0.554
## .v51 0.640 0.020 32.257 0.000 0.640 0.640
## .v52 0.550 0.018 31.312 0.000 0.550 0.549
## .v53 0.488 0.016 30.471 0.000 0.488 0.487
## .v54 0.495 0.016 30.579 0.000 0.495 0.494
## .v55 0.505 0.016 30.725 0.000 0.505 0.504
## .v61 0.738 0.023 32.538 0.000 0.738 0.737
## .v62 0.560 0.018 30.379 0.000 0.560 0.560
## .v63 0.447 0.016 28.078 0.000 0.447 0.447
## .v64 0.393 0.015 26.497 0.000 0.393 0.393
## .v65 0.455 0.016 28.282 0.000 0.455 0.455
## .v71 0.076 0.003 28.133 0.000 0.076 0.076
## .v72 0.058 0.002 25.967 0.000 0.058 0.058
## .v73 0.060 0.002 26.292 0.000 0.060 0.060
## .v74 0.070 0.003 27.506 0.000 0.070 0.070
## .v75 0.079 0.003 28.364 0.000 0.079 0.079
## Att 0.430 0.025 17.164 0.000 1.000 1.000
## Sub 0.419 0.025 17.010 0.000 1.000 1.000
## PBC 0.266 0.021 12.843 0.000 1.000 1.000
## .Int 0.084 0.007 11.380 0.000 0.318 0.318
## .kWh 0.860 0.027 31.522 0.000 0.933 0.933
##
## R-Square:
## Estimate
## v11 0.430
## v12 0.602
## v13 0.643
## v14 0.688
## v15 0.679
## v21 0.418
## v22 0.533
## v23 0.553
## v24 0.582
## v25 0.530
## v31 0.287
## v32 0.414
## v33 0.494
## v34 0.493
## v35 0.485
## v41 0.266
## v42 0.444
## v43 0.444
## v44 0.481
## v45 0.446
## v51 0.360
## v52 0.451
## v53 0.513
## v54 0.506
## v55 0.496
## v61 0.263
## v62 0.440
## v63 0.553
## v64 0.607
## v65 0.545
## v71 0.924
## v72 0.942
## v73 0.940
## v74 0.930
## v75 0.921
## Int 0.682
## kWh 0.067
##
## Modification Indices:
##
## lhs op rhs mi epc sepc.lv
## 1 Att =~ v21 0.071 -0.009 -0.006
## 2 Att =~ v22 8.414 -0.089 -0.058
## 3 Att =~ v23 0.502 -0.021 -0.014
## 4 Att =~ v24 3.600 -0.056 -0.037
## 5 Att =~ v25 1.204 -0.034 -0.022
## 6 Att =~ v31 0.990 0.036 0.024
## 7 Att =~ v32 0.550 -0.025 -0.016
## 8 Att =~ v33 7.531 0.086 0.057
## 9 Att =~ v34 0.005 0.002 0.001
## 10 Att =~ v35 22.434 0.150 0.098
## 11 Att =~ v41 91.373 0.369 0.242
## 12 Att =~ v42 54.683 0.256 0.168
## 13 Att =~ v43 124.800 0.387 0.254
## 14 Att =~ v44 135.789 0.394 0.258
## 15 Att =~ v45 138.063 0.406 0.266
## 16 Att =~ v51 34.244 -0.214 -0.140
## 17 Att =~ v52 132.302 -0.397 -0.260
## 18 Att =~ v53 83.741 -0.303 -0.199
## 19 Att =~ v54 72.240 -0.283 -0.185
## 20 Att =~ v55 128.499 -0.380 -0.249
## 21 Att =~ v61 5.324 -0.098 -0.064
## 22 Att =~ v62 9.483 -0.121 -0.079
## 23 Att =~ v63 3.286 -0.068 -0.045
## 24 Att =~ v64 0.053 -0.008 -0.006
## 25 Att =~ v65 8.544 0.110 0.072
## 26 Att =~ v71 10.201 -0.032 -0.021
## 27 Att =~ v72 0.002 0.000 0.000
## 28 Att =~ v73 2.216 0.014 0.009
## 29 Att =~ v74 0.422 0.006 0.004
## 30 Att =~ v75 7.663 -0.028 -0.019
## 31 Sub =~ v11 7.540 -0.092 -0.059
## 32 Sub =~ v12 0.149 -0.009 -0.006
## 33 Sub =~ v13 1.935 0.034 0.022
## 34 Sub =~ v14 0.700 -0.020 -0.013
## 35 Sub =~ v15 2.922 0.040 0.026
## 36 Sub =~ v41 52.449 0.327 0.212
## 37 Sub =~ v42 91.564 0.390 0.252
## 38 Sub =~ v43 82.802 0.370 0.240
## 39 Sub =~ v44 74.896 0.344 0.223
## 40 Sub =~ v45 44.737 0.272 0.176
## 41 Sub =~ v51 92.386 -0.412 -0.267
## 42 Sub =~ v52 63.713 -0.324 -0.210
## 43 Sub =~ v53 62.237 -0.307 -0.199
## 44 Sub =~ v54 58.157 -0.298 -0.193
## 45 Sub =~ v55 32.224 -0.224 -0.145
## 46 Sub =~ v61 14.911 0.200 0.129
## 47 Sub =~ v62 0.083 0.014 0.009
## 48 Sub =~ v63 4.078 0.094 0.061
## 49 Sub =~ v64 4.729 -0.100 -0.065
## 50 Sub =~ v65 2.458 -0.073 -0.047
## 51 Sub =~ v71 12.295 -0.036 -0.023
## 52 Sub =~ v72 0.001 0.000 0.000
## 53 Sub =~ v73 2.935 0.016 0.010
## 54 Sub =~ v74 0.444 0.007 0.004
## 55 Sub =~ v75 2.761 0.017 0.011
## 56 PBC =~ v11 12.238 -0.155 -0.080
## 57 PBC =~ v12 0.698 -0.027 -0.014
## 58 PBC =~ v13 1.452 0.039 0.020
## 59 PBC =~ v14 0.031 0.006 0.003
## 60 PBC =~ v15 3.141 0.056 0.029
## 61 PBC =~ v21 0.007 -0.004 -0.002
## 62 PBC =~ v22 1.482 -0.058 -0.030
## 63 PBC =~ v23 0.048 -0.010 -0.005
## 64 PBC =~ v24 0.197 0.020 0.011
## 65 PBC =~ v25 0.022 0.007 0.004
## 66 PBC =~ v31 5.349 -0.130 -0.067
## 67 PBC =~ v32 0.170 -0.021 -0.011
## 68 PBC =~ v33 4.255 0.101 0.052
## 69 PBC =~ v34 1.022 -0.050 -0.026
## 70 PBC =~ v35 3.321 0.090 0.046
## 71 PBC =~ v61 1.891 0.091 0.047
## 72 PBC =~ v62 0.136 0.023 0.012
## 73 PBC =~ v63 1.210 -0.065 -0.034
## 74 PBC =~ v64 0.785 -0.052 -0.027
## 75 PBC =~ v65 0.767 0.052 0.027
## 76 PBC =~ v71 7.841 -0.036 -0.019
## 77 PBC =~ v72 0.179 -0.005 -0.003
## 78 PBC =~ v73 2.569 0.019 0.010
## 79 PBC =~ v74 1.924 0.017 0.009
## 80 PBC =~ v75 0.005 0.001 0.000
## 81 Int =~ v11 17.835 -0.208 -0.106
## 82 Int =~ v12 0.000 0.000 0.000
## 83 Int =~ v13 14.577 0.139 0.071
## 84 Int =~ v14 1.405 -0.042 -0.022
## 85 Int =~ v15 0.180 0.015 0.008
## 86 Int =~ v21 0.330 -0.034 -0.017
## 87 Int =~ v22 25.836 -0.278 -0.142
## 88 Int =~ v23 3.632 -0.103 -0.053
## 89 Int =~ v24 14.504 -0.201 -0.103
## 90 Int =~ v25 28.677 -0.293 -0.150
## 91 Int =~ v31 1.046 0.066 0.034
## 92 Int =~ v32 0.092 0.018 0.009
## 93 Int =~ v33 44.311 0.374 0.192
## 94 Int =~ v34 20.365 0.254 0.130
## 95 Int =~ v35 37.566 0.347 0.178
## 96 Int =~ v41 119.110 0.722 0.370
## 97 Int =~ v42 247.757 0.940 0.482
## 98 Int =~ v43 267.996 0.977 0.501
## 99 Int =~ v44 286.552 0.988 0.507
## 100 Int =~ v45 251.085 0.945 0.484
## 101 Int =~ v51 164.789 -0.806 -0.413
## 102 Int =~ v52 168.348 -0.772 -0.396
## 103 Int =~ v53 178.257 -0.764 -0.392
## 104 Int =~ v54 266.942 -0.939 -0.481
## 105 Int =~ v55 227.503 -0.872 -0.447
## 106 Int =~ v71 15.453 -0.052 -0.027
## 107 Int =~ v72 0.350 0.007 0.004
## 108 Int =~ v73 3.906 0.024 0.012
## 109 Int =~ v74 2.915 0.022 0.011
## 110 Int =~ v75 0.470 -0.009 -0.005
## 111 kWh =~ v11 40.607 -0.109 -0.104
## 112 kWh =~ v12 9.890 -0.038 -0.037
## 113 kWh =~ v13 13.274 -0.045 -0.043
## 114 kWh =~ v14 2.074 -0.017 -0.016
## 115 kWh =~ v15 12.737 -0.042 -0.040
## 116 kWh =~ v21 0.929 -0.017 -0.016
## 117 kWh =~ v22 0.607 0.012 0.012
## 118 kWh =~ v23 0.003 0.001 0.001
## 119 kWh =~ v24 46.042 0.102 0.098
## 120 kWh =~ v25 11.547 0.053 0.051
## 121 kWh =~ v31 0.802 0.017 0.016
## 122 kWh =~ v32 0.057 -0.004 -0.004
## 123 kWh =~ v33 10.165 -0.052 -0.049
## 124 kWh =~ v34 0.402 0.010 0.010
## 125 kWh =~ v35 4.940 -0.036 -0.035
## 126 kWh =~ v41 41.990 -0.123 -0.119
## 127 kWh =~ v42 21.885 -0.080 -0.077
## 128 kWh =~ v43 59.295 -0.131 -0.126
## 129 kWh =~ v44 25.078 -0.083 -0.080
## 130 kWh =~ v45 66.740 -0.139 -0.133
## 131 kWh =~ v51 4.384 0.038 0.036
## 132 kWh =~ v52 73.410 0.145 0.140
## 133 kWh =~ v53 53.295 0.119 0.114
## 134 kWh =~ v54 73.838 0.140 0.135
## 135 kWh =~ v55 52.965 0.120 0.115
## 136 kWh =~ v61 9.564 0.060 0.058
## 137 kWh =~ v62 10.840 0.058 0.056
## 138 kWh =~ v63 1.921 0.023 0.022
## 139 kWh =~ v64 0.106 0.005 0.005
## 140 kWh =~ v65 6.762 -0.043 -0.041
## 141 v11 ~~ v12 10.247 0.031 0.031
## 142 v11 ~~ v13 21.859 0.045 0.045
## 143 v11 ~~ v14 6.835 -0.025 -0.025
## 144 v11 ~~ v15 3.493 -0.017 -0.017
## 145 v11 ~~ v21 22.567 0.061 0.061
## 146 v11 ~~ v22 1.237 0.013 0.013
## 147 v11 ~~ v23 0.021 -0.002 -0.002
## 148 v11 ~~ v24 18.266 -0.048 -0.048
## 149 v11 ~~ v25 15.580 -0.046 -0.046
## 150 v11 ~~ v31 57.698 0.106 0.106
## 151 v11 ~~ v32 3.435 0.024 0.024
## 152 v11 ~~ v33 0.927 0.012 0.012
## 153 v11 ~~ v34 3.131 -0.021 -0.021
## 154 v11 ~~ v35 3.283 -0.022 -0.022
## 155 v11 ~~ v41 115.255 0.152 0.152
## 156 v11 ~~ v42 0.852 0.012 0.012
## 157 v11 ~~ v43 1.284 -0.014 -0.014
## 158 v11 ~~ v44 4.995 -0.027 -0.027
## 159 v11 ~~ v45 11.455 -0.042 -0.042
## 160 v11 ~~ v51 34.714 0.078 0.078
## 161 v11 ~~ v52 0.220 0.006 0.006
## 162 v11 ~~ v53 2.018 -0.017 -0.017
## 163 v11 ~~ v54 23.496 -0.058 -0.058
## 164 v11 ~~ v55 2.580 -0.019 -0.019
## 165 v11 ~~ v61 136.140 0.166 0.166
## 166 v11 ~~ v62 1.507 0.016 0.016
## 167 v11 ~~ v63 6.246 -0.029 -0.029
## 168 v11 ~~ v64 28.648 -0.061 -0.061
## 169 v11 ~~ v65 15.745 -0.047 -0.047
## 170 v11 ~~ v71 0.278 -0.003 -0.003
## 171 v11 ~~ v72 1.548 -0.006 -0.006
## 172 v11 ~~ v73 0.100 -0.001 -0.001
## 173 v11 ~~ v74 0.290 0.003 0.003
## 174 v11 ~~ v75 0.199 -0.002 -0.002
## 175 v12 ~~ v13 1.935 0.010 0.010
## 176 v12 ~~ v14 6.651 -0.019 -0.019
## 177 v12 ~~ v15 0.558 -0.005 -0.005
## 178 v12 ~~ v21 0.624 0.007 0.007
## 179 v12 ~~ v22 13.217 0.030 0.030
## 180 v12 ~~ v23 1.706 0.011 0.011
## 181 v12 ~~ v24 1.510 -0.010 -0.010
## 182 v12 ~~ v25 2.083 0.012 0.012
## 183 v12 ~~ v31 1.034 -0.010 -0.010
## 184 v12 ~~ v32 1.732 0.012 0.012
## 185 v12 ~~ v33 2.179 0.013 0.013
## 186 v12 ~~ v34 13.553 -0.032 -0.032
## 187 v12 ~~ v35 22.498 -0.041 -0.041
## 188 v12 ~~ v41 0.338 -0.006 -0.006
## 189 v12 ~~ v42 80.707 0.081 0.081
## 190 v12 ~~ v43 0.890 -0.008 -0.008
## 191 v12 ~~ v44 17.480 -0.037 -0.037
## 192 v12 ~~ v45 1.542 -0.011 -0.011
## 193 v12 ~~ v51 0.234 -0.005 -0.005
## 194 v12 ~~ v52 3.252 0.016 0.016
## 195 v12 ~~ v53 0.079 0.002 0.002
## 196 v12 ~~ v54 13.468 -0.031 -0.031
## 197 v12 ~~ v55 0.704 -0.007 -0.007
## 198 v12 ~~ v61 0.095 -0.003 -0.003
## 199 v12 ~~ v62 80.779 0.082 0.082
## 200 v12 ~~ v63 0.296 -0.005 -0.005
## 201 v12 ~~ v64 19.387 -0.036 -0.036
## 202 v12 ~~ v65 2.275 -0.013 -0.013
## 203 v12 ~~ v71 5.207 -0.008 -0.008
## 204 v12 ~~ v72 0.207 -0.001 -0.001
## 205 v12 ~~ v73 0.651 0.003 0.003
## 206 v12 ~~ v74 0.071 -0.001 -0.001
## 207 v12 ~~ v75 1.619 0.005 0.005
## 208 v13 ~~ v14 2.094 -0.011 -0.011
## 209 v13 ~~ v15 28.121 -0.039 -0.039
## 210 v13 ~~ v21 0.547 -0.007 -0.007
## 211 v13 ~~ v22 0.496 -0.006 -0.006
## 212 v13 ~~ v23 18.930 0.036 0.036
## 213 v13 ~~ v24 0.177 -0.003 -0.003
## 214 v13 ~~ v25 19.837 -0.037 -0.037
## 215 v13 ~~ v31 2.224 0.015 0.015
## 216 v13 ~~ v32 2.452 -0.014 -0.014
## 217 v13 ~~ v33 16.996 0.035 0.035
## 218 v13 ~~ v34 10.350 -0.028 -0.028
## 219 v13 ~~ v35 0.117 0.003 0.003
## 220 v13 ~~ v41 1.770 0.013 0.013
## 221 v13 ~~ v42 0.346 0.005 0.005
## 222 v13 ~~ v43 175.360 0.119 0.119
## 223 v13 ~~ v44 1.635 0.011 0.011
## 224 v13 ~~ v45 4.167 -0.018 -0.018
## 225 v13 ~~ v51 21.967 -0.045 -0.045
## 226 v13 ~~ v52 24.479 -0.044 -0.044
## 227 v13 ~~ v53 0.143 0.003 0.003
## 228 v13 ~~ v54 0.782 -0.008 -0.008
## 229 v13 ~~ v55 27.901 -0.046 -0.046
## 230 v13 ~~ v61 14.833 -0.039 -0.039
## 231 v13 ~~ v62 1.951 -0.013 -0.013
## 232 v13 ~~ v63 177.240 0.113 0.113
## 233 v13 ~~ v64 4.680 -0.018 -0.018
## 234 v13 ~~ v65 8.404 -0.025 -0.025
## 235 v13 ~~ v71 0.293 -0.002 -0.002
## 236 v13 ~~ v72 5.016 -0.007 -0.007
## 237 v13 ~~ v73 0.113 0.001 0.001
## 238 v13 ~~ v74 2.137 0.005 0.005
## 239 v13 ~~ v75 0.037 0.001 0.001
## 240 v14 ~~ v15 40.066 0.047 0.047
## 241 v14 ~~ v21 0.587 0.007 0.007
## 242 v14 ~~ v22 2.159 -0.012 -0.012
## 243 v14 ~~ v23 1.961 -0.011 -0.011
## 244 v14 ~~ v24 14.846 0.030 0.030
## 245 v14 ~~ v25 3.003 -0.014 -0.014
## 246 v14 ~~ v31 2.921 -0.016 -0.016
## 247 v14 ~~ v32 1.313 -0.010 -0.010
## 248 v14 ~~ v33 24.132 -0.041 -0.041
## 249 v14 ~~ v34 28.989 0.045 0.045
## 250 v14 ~~ v35 0.430 0.005 0.005
## 251 v14 ~~ v41 1.514 -0.012 -0.012
## 252 v14 ~~ v42 3.209 -0.016 -0.016
## 253 v14 ~~ v43 4.850 -0.019 -0.019
## 254 v14 ~~ v44 109.596 0.089 0.089
## 255 v14 ~~ v45 0.384 -0.005 -0.005
## 256 v14 ~~ v51 0.000 0.000 0.000
## 257 v14 ~~ v52 0.254 -0.004 -0.004
## 258 v14 ~~ v53 12.496 -0.029 -0.029
## 259 v14 ~~ v54 23.428 0.040 0.040
## 260 v14 ~~ v55 18.776 -0.036 -0.036
## 261 v14 ~~ v61 22.497 -0.047 -0.047
## 262 v14 ~~ v62 25.944 -0.045 -0.045
## 263 v14 ~~ v63 30.452 -0.045 -0.045
## 264 v14 ~~ v64 146.220 0.095 0.095
## 265 v14 ~~ v65 7.255 -0.022 -0.022
## 266 v14 ~~ v71 1.893 0.005 0.005
## 267 v14 ~~ v72 1.634 0.004 0.004
## 268 v14 ~~ v73 0.018 0.000 0.000
## 269 v14 ~~ v74 0.216 -0.002 -0.002
## 270 v14 ~~ v75 6.819 -0.009 -0.009
## 271 v15 ~~ v21 15.475 -0.034 -0.034
## 272 v15 ~~ v22 9.030 -0.024 -0.024
## 273 v15 ~~ v23 14.784 -0.030 -0.030
## 274 v15 ~~ v24 0.349 -0.005 -0.005
## 275 v15 ~~ v25 59.046 0.061 0.061
## 276 v15 ~~ v31 6.177 -0.024 -0.024
## 277 v15 ~~ v32 0.255 -0.004 -0.004
## 278 v15 ~~ v33 0.130 -0.003 -0.003
## 279 v15 ~~ v34 0.691 0.007 0.007
## 280 v15 ~~ v35 50.170 0.059 0.059
## 281 v15 ~~ v41 0.535 -0.007 -0.007
## 282 v15 ~~ v42 28.441 -0.046 -0.046
## 283 v15 ~~ v43 10.125 -0.027 -0.027
## 284 v15 ~~ v44 0.835 -0.008 -0.008
## 285 v15 ~~ v45 149.546 0.105 0.105
## 286 v15 ~~ v51 0.042 -0.002 -0.002
## 287 v15 ~~ v52 13.153 -0.031 -0.031
## 288 v15 ~~ v53 0.299 -0.004 -0.004
## 289 v15 ~~ v54 0.465 -0.006 -0.006
## 290 v15 ~~ v55 23.765 0.040 0.040
## 291 v15 ~~ v61 2.091 -0.014 -0.014
## 292 v15 ~~ v62 25.244 -0.044 -0.044
## 293 v15 ~~ v63 40.306 -0.051 -0.051
## 294 v15 ~~ v64 8.013 -0.022 -0.022
## 295 v15 ~~ v65 140.554 0.096 0.096
## 296 v15 ~~ v71 0.323 0.002 0.002
## 297 v15 ~~ v72 3.101 0.005 0.005
## 298 v15 ~~ v73 0.201 -0.001 -0.001
## 299 v15 ~~ v74 2.432 -0.005 -0.005
## 300 v15 ~~ v75 3.595 -0.006 -0.006
## 301 v21 ~~ v22 42.831 0.078 0.078
## 302 v21 ~~ v23 47.413 0.081 0.081
## 303 v21 ~~ v24 9.603 0.036 0.036
## 304 v21 ~~ v25 2.864 0.020 0.020
## 305 v21 ~~ v31 33.391 0.081 0.081
## 306 v21 ~~ v32 5.376 -0.030 -0.030
## 307 v21 ~~ v33 50.184 -0.087 -0.087
## 308 v21 ~~ v34 35.645 -0.073 -0.073
## 309 v21 ~~ v35 71.943 -0.105 -0.105
## 310 v21 ~~ v41 100.905 0.142 0.142
## 311 v21 ~~ v42 10.245 0.040 0.040
## 312 v21 ~~ v43 0.859 -0.012 -0.012
## 313 v21 ~~ v44 5.338 -0.028 -0.028
## 314 v21 ~~ v45 11.312 -0.042 -0.042
## 315 v21 ~~ v51 37.673 0.082 0.082
## 316 v21 ~~ v52 0.024 0.002 0.002
## 317 v21 ~~ v53 2.267 -0.018 -0.018
## 318 v21 ~~ v54 20.347 -0.054 -0.054
## 319 v21 ~~ v55 2.084 -0.018 -0.018
## 320 v21 ~~ v61 46.282 0.097 0.097
## 321 v21 ~~ v62 1.410 0.015 0.015
## 322 v21 ~~ v63 0.332 -0.007 -0.007
## 323 v21 ~~ v64 0.060 -0.003 -0.003
## 324 v21 ~~ v65 25.776 -0.060 -0.060
## 325 v21 ~~ v71 10.381 -0.016 -0.016
## 326 v21 ~~ v72 0.079 -0.001 -0.001
## 327 v21 ~~ v73 0.010 0.000 0.000
## 328 v21 ~~ v74 5.856 0.012 0.012
## 329 v21 ~~ v75 0.234 0.002 0.002
## 330 v22 ~~ v23 123.195 0.120 0.120
## 331 v22 ~~ v24 84.573 0.098 0.098
## 332 v22 ~~ v25 30.112 0.061 0.061
## 333 v22 ~~ v31 31.398 -0.072 -0.072
## 334 v22 ~~ v32 0.201 -0.005 -0.005
## 335 v22 ~~ v33 81.345 -0.102 -0.102
## 336 v22 ~~ v34 63.034 -0.090 -0.090
## 337 v22 ~~ v35 82.561 -0.104 -0.104
## 338 v22 ~~ v41 14.255 -0.049 -0.049
## 339 v22 ~~ v42 1.642 0.015 0.015
## 340 v22 ~~ v43 34.019 -0.067 -0.067
## 341 v22 ~~ v44 15.250 -0.044 -0.044
## 342 v22 ~~ v45 15.717 -0.046 -0.046
## 343 v22 ~~ v51 0.180 0.005 0.005
## 344 v22 ~~ v52 55.926 0.086 0.086
## 345 v22 ~~ v53 8.811 0.032 0.032
## 346 v22 ~~ v54 14.876 0.042 0.042
## 347 v22 ~~ v55 2.893 0.019 0.019
## 348 v22 ~~ v61 0.813 -0.012 -0.012
## 349 v22 ~~ v62 13.412 0.043 0.043
## 350 v22 ~~ v63 1.459 -0.013 -0.013
## 351 v22 ~~ v64 17.678 -0.044 -0.044
## 352 v22 ~~ v65 6.456 -0.028 -0.028
## 353 v22 ~~ v71 0.002 0.000 0.000
## 354 v22 ~~ v72 1.828 -0.005 -0.005
## 355 v22 ~~ v73 0.043 0.001 0.001
## 356 v22 ~~ v74 0.705 0.004 0.004
## 357 v22 ~~ v75 0.428 0.003 0.003
## 358 v23 ~~ v24 165.033 0.134 0.134
## 359 v23 ~~ v25 67.212 0.089 0.089
## 360 v23 ~~ v31 60.422 -0.099 -0.099
## 361 v23 ~~ v32 123.986 -0.131 -0.131
## 362 v23 ~~ v33 29.775 -0.061 -0.061
## 363 v23 ~~ v34 117.919 -0.121 -0.121
## 364 v23 ~~ v35 80.019 -0.100 -0.100
## 365 v23 ~~ v41 25.681 -0.064 -0.064
## 366 v23 ~~ v42 2.019 -0.016 -0.016
## 367 v23 ~~ v43 24.730 0.056 0.056
## 368 v23 ~~ v44 16.980 -0.045 -0.045
## 369 v23 ~~ v45 54.463 -0.083 -0.083
## 370 v23 ~~ v51 2.116 -0.017 -0.017
## 371 v23 ~~ v52 0.016 -0.001 -0.001
## 372 v23 ~~ v53 98.640 0.107 0.107
## 373 v23 ~~ v54 10.953 0.036 0.036
## 374 v23 ~~ v55 0.455 -0.007 -0.007
## 375 v23 ~~ v61 1.683 -0.017 -0.017
## 376 v23 ~~ v62 2.993 -0.020 -0.020
## 377 v23 ~~ v63 29.482 0.058 0.058
## 378 v23 ~~ v64 4.126 -0.021 -0.021
## 379 v23 ~~ v65 12.625 -0.038 -0.038
## 380 v23 ~~ v71 0.055 -0.001 -0.001
## 381 v23 ~~ v72 0.039 0.001 0.001
## 382 v23 ~~ v73 0.198 0.002 0.002
## 383 v23 ~~ v74 0.424 -0.003 -0.003
## 384 v23 ~~ v75 0.176 0.002 0.002
## 385 v24 ~~ v25 132.249 0.122 0.122
## 386 v24 ~~ v31 116.042 -0.133 -0.133
## 387 v24 ~~ v32 108.967 -0.120 -0.120
## 388 v24 ~~ v33 120.953 -0.120 -0.120
## 389 v24 ~~ v34 1.794 -0.015 -0.015
## 390 v24 ~~ v35 61.467 -0.086 -0.086
## 391 v24 ~~ v41 31.540 -0.069 -0.069
## 392 v24 ~~ v42 29.925 -0.060 -0.060
## 393 v24 ~~ v43 26.981 -0.057 -0.057
## 394 v24 ~~ v44 5.787 0.026 0.026
## 395 v24 ~~ v45 10.466 -0.036 -0.036
## 396 v24 ~~ v51 10.088 -0.037 -0.037
## 397 v24 ~~ v52 2.958 0.019 0.019
## 398 v24 ~~ v53 21.462 0.048 0.048
## 399 v24 ~~ v54 106.082 0.108 0.108
## 400 v24 ~~ v55 8.100 0.030 0.030
## 401 v24 ~~ v61 19.857 -0.056 -0.056
## 402 v24 ~~ v62 11.751 -0.038 -0.038
## 403 v24 ~~ v63 0.622 -0.008 -0.008
## 404 v24 ~~ v64 10.394 0.032 0.032
## 405 v24 ~~ v65 2.623 -0.017 -0.017
## 406 v24 ~~ v71 1.732 -0.006 -0.006
## 407 v24 ~~ v72 1.098 0.004 0.004
## 408 v24 ~~ v73 0.346 0.002 0.002
## 409 v24 ~~ v74 1.443 -0.005 -0.005
## 410 v24 ~~ v75 8.453 0.013 0.013
## 411 v25 ~~ v31 41.517 -0.083 -0.083
## 412 v25 ~~ v32 32.611 -0.068 -0.068
## 413 v25 ~~ v33 91.526 -0.108 -0.108
## 414 v25 ~~ v34 13.911 -0.042 -0.042
## 415 v25 ~~ v35 12.675 -0.041 -0.041
## 416 v25 ~~ v41 24.105 -0.064 -0.064
## 417 v25 ~~ v42 14.030 -0.043 -0.043
## 418 v25 ~~ v43 29.005 -0.062 -0.062
## 419 v25 ~~ v44 44.278 -0.075 -0.075
## 420 v25 ~~ v45 10.102 0.037 0.037
## 421 v25 ~~ v51 0.392 -0.008 -0.008
## 422 v25 ~~ v52 0.006 -0.001 -0.001
## 423 v25 ~~ v53 1.296 0.012 0.012
## 424 v25 ~~ v54 27.600 0.058 0.058
## 425 v25 ~~ v55 162.443 0.142 0.142
## 426 v25 ~~ v61 0.096 0.004 0.004
## 427 v25 ~~ v62 14.960 -0.045 -0.045
## 428 v25 ~~ v63 2.614 -0.018 -0.018
## 429 v25 ~~ v64 35.859 -0.062 -0.062
## 430 v25 ~~ v65 12.423 0.038 0.038
## 431 v25 ~~ v71 7.474 0.012 0.012
## 432 v25 ~~ v72 2.581 -0.007 -0.007
## 433 v25 ~~ v73 0.108 -0.001 -0.001
## 434 v25 ~~ v74 2.495 -0.007 -0.007
## 435 v25 ~~ v75 4.306 0.009 0.009
## 436 v31 ~~ v32 121.275 0.155 0.155
## 437 v31 ~~ v33 92.675 0.128 0.128
## 438 v31 ~~ v34 17.261 0.055 0.055
## 439 v31 ~~ v35 38.307 0.083 0.083
## 440 v31 ~~ v41 115.029 0.166 0.166
## 441 v31 ~~ v42 1.984 0.019 0.019
## 442 v31 ~~ v43 1.634 -0.018 -0.018
## 443 v31 ~~ v44 0.338 0.008 0.008
## 444 v31 ~~ v45 0.457 0.009 0.009
## 445 v31 ~~ v51 14.080 0.055 0.055
## 446 v31 ~~ v52 1.266 -0.015 -0.015
## 447 v31 ~~ v53 35.488 -0.078 -0.078
## 448 v31 ~~ v54 21.547 -0.061 -0.061
## 449 v31 ~~ v55 15.622 -0.052 -0.052
## 450 v31 ~~ v61 87.096 0.145 0.145
## 451 v31 ~~ v62 7.231 0.038 0.038
## 452 v31 ~~ v63 2.129 -0.019 -0.019
## 453 v31 ~~ v64 1.042 -0.013 -0.013
## 454 v31 ~~ v65 4.453 -0.027 -0.027
## 455 v31 ~~ v71 0.194 -0.002 -0.002
## 456 v31 ~~ v72 0.304 0.003 0.003
## 457 v31 ~~ v73 0.266 -0.003 -0.003
## 458 v31 ~~ v74 1.711 0.007 0.007
## 459 v31 ~~ v75 0.318 -0.003 -0.003
## 460 v32 ~~ v33 138.181 0.145 0.145
## 461 v32 ~~ v34 46.460 0.084 0.084
## 462 v32 ~~ v35 54.471 0.092 0.092
## 463 v32 ~~ v41 11.014 0.047 0.047
## 464 v32 ~~ v42 46.780 0.086 0.086
## 465 v32 ~~ v43 0.063 0.003 0.003
## 466 v32 ~~ v44 0.087 -0.004 -0.004
## 467 v32 ~~ v45 0.870 -0.012 -0.012
## 468 v32 ~~ v51 0.000 0.000 0.000
## 469 v32 ~~ v52 3.104 0.022 0.022
## 470 v32 ~~ v53 26.772 -0.062 -0.062
## 471 v32 ~~ v54 18.160 -0.051 -0.051
## 472 v32 ~~ v55 0.002 0.001 0.001
## 473 v32 ~~ v61 4.142 0.029 0.029
## 474 v32 ~~ v62 40.195 0.081 0.081
## 475 v32 ~~ v63 0.091 -0.004 -0.004
## 476 v32 ~~ v64 3.256 -0.021 -0.021
## 477 v32 ~~ v65 10.012 -0.038 -0.038
## 478 v32 ~~ v71 0.871 0.005 0.005
## 479 v32 ~~ v72 0.220 -0.002 -0.002
## 480 v32 ~~ v73 0.261 -0.002 -0.002
## 481 v32 ~~ v74 5.043 -0.011 -0.011
## 482 v32 ~~ v75 4.866 0.011 0.011
## 483 v33 ~~ v34 90.331 0.111 0.111
## 484 v33 ~~ v35 168.759 0.152 0.152
## 485 v33 ~~ v41 21.053 0.061 0.061
## 486 v33 ~~ v42 7.979 0.034 0.034
## 487 v33 ~~ v43 66.447 0.097 0.097
## 488 v33 ~~ v44 4.087 0.023 0.023
## 489 v33 ~~ v45 9.413 0.036 0.036
## 490 v33 ~~ v51 0.001 0.000 0.000
## 491 v33 ~~ v52 14.540 -0.045 -0.045
## 492 v33 ~~ v53 0.295 -0.006 -0.006
## 493 v33 ~~ v54 57.281 -0.086 -0.086
## 494 v33 ~~ v55 52.376 -0.083 -0.083
## 495 v33 ~~ v61 1.119 0.014 0.014
## 496 v33 ~~ v62 4.595 0.026 0.026
## 497 v33 ~~ v63 21.961 0.052 0.052
## 498 v33 ~~ v64 0.986 0.011 0.011
## 499 v33 ~~ v65 0.323 -0.006 -0.006
## 500 v33 ~~ v71 1.799 -0.006 -0.006
## 501 v33 ~~ v72 1.479 0.005 0.005
## 502 v33 ~~ v73 0.005 0.000 0.000
## 503 v33 ~~ v74 0.938 -0.004 -0.004
## 504 v33 ~~ v75 0.000 0.000 0.000
## 505 v34 ~~ v35 114.536 0.126 0.126
## 506 v34 ~~ v41 4.699 -0.029 -0.029
## 507 v34 ~~ v42 0.660 0.010 0.010
## 508 v34 ~~ v43 16.355 0.048 0.048
## 509 v34 ~~ v44 105.615 0.119 0.119
## 510 v34 ~~ v45 24.603 0.059 0.059
## 511 v34 ~~ v51 23.800 -0.061 -0.061
## 512 v34 ~~ v52 23.108 -0.057 -0.057
## 513 v34 ~~ v53 38.190 -0.070 -0.070
## 514 v34 ~~ v54 17.371 -0.047 -0.047
## 515 v34 ~~ v55 10.837 -0.038 -0.038
## 516 v34 ~~ v61 9.725 -0.042 -0.042
## 517 v34 ~~ v62 1.911 -0.017 -0.017
## 518 v34 ~~ v63 1.244 -0.012 -0.012
## 519 v34 ~~ v64 56.370 0.080 0.080
## 520 v34 ~~ v65 16.888 0.046 0.046
## 521 v34 ~~ v71 2.317 0.007 0.007
## 522 v34 ~~ v72 1.396 -0.005 -0.005
## 523 v34 ~~ v73 1.599 0.005 0.005
## 524 v34 ~~ v74 0.547 0.003 0.003
## 525 v34 ~~ v75 4.000 -0.009 -0.009
## 526 v35 ~~ v41 15.942 0.054 0.054
## 527 v35 ~~ v42 3.093 0.021 0.021
## 528 v35 ~~ v43 21.229 0.055 0.055
## 529 v35 ~~ v44 18.539 0.050 0.050
## 530 v35 ~~ v45 53.914 0.088 0.088
## 531 v35 ~~ v51 15.631 -0.050 -0.050
## 532 v35 ~~ v52 27.795 -0.063 -0.063
## 533 v35 ~~ v53 56.882 -0.086 -0.086
## 534 v35 ~~ v54 15.020 -0.044 -0.044
## 535 v35 ~~ v55 2.992 -0.020 -0.020
## 536 v35 ~~ v61 0.586 0.010 0.010
## 537 v35 ~~ v62 5.540 -0.029 -0.029
## 538 v35 ~~ v63 0.382 -0.007 -0.007
## 539 v35 ~~ v64 0.847 0.010 0.010
## 540 v35 ~~ v65 51.159 0.081 0.081
## 541 v35 ~~ v71 0.484 -0.003 -0.003
## 542 v35 ~~ v72 3.085 0.007 0.007
## 543 v35 ~~ v73 1.348 -0.005 -0.005
## 544 v35 ~~ v74 0.437 0.003 0.003
## 545 v35 ~~ v75 1.885 -0.007 -0.007
## 546 v41 ~~ v42 149.079 0.172 0.172
## 547 v41 ~~ v43 39.681 0.089 0.089
## 548 v41 ~~ v44 27.187 0.072 0.072
## 549 v41 ~~ v45 32.061 0.080 0.080
## 550 v41 ~~ v51 5.580 0.035 0.035
## 551 v41 ~~ v52 110.853 -0.148 -0.148
## 552 v41 ~~ v53 89.874 -0.128 -0.128
## 553 v41 ~~ v54 116.665 -0.146 -0.146
## 554 v41 ~~ v55 141.985 -0.162 -0.162
## 555 v41 ~~ v61 262.235 0.256 0.256
## 556 v41 ~~ v62 0.208 -0.006 -0.006
## 557 v41 ~~ v63 0.814 -0.012 -0.012
## 558 v41 ~~ v64 0.006 -0.001 -0.001
## 559 v41 ~~ v65 0.911 -0.013 -0.013
## 560 v41 ~~ v71 0.996 -0.005 -0.005
## 561 v41 ~~ v72 0.642 0.004 0.004
## 562 v41 ~~ v73 1.642 -0.006 -0.006
## 563 v41 ~~ v74 0.259 0.003 0.003
## 564 v41 ~~ v75 0.306 -0.003 -0.003
## 565 v42 ~~ v43 132.087 0.146 0.146
## 566 v42 ~~ v44 57.245 0.094 0.094
## 567 v42 ~~ v45 76.763 0.111 0.111
## 568 v42 ~~ v51 80.746 -0.120 -0.120
## 569 v42 ~~ v52 13.279 -0.046 -0.046
## 570 v42 ~~ v53 156.641 -0.152 -0.152
## 571 v42 ~~ v54 214.120 -0.178 -0.178
## 572 v42 ~~ v55 156.423 -0.153 -0.153
## 573 v42 ~~ v61 0.599 0.011 0.011
## 574 v42 ~~ v62 226.165 0.190 0.190
## 575 v42 ~~ v63 0.611 0.009 0.009
## 576 v42 ~~ v64 0.785 0.010 0.010
## 577 v42 ~~ v65 0.020 -0.002 -0.002
## 578 v42 ~~ v71 1.584 -0.006 -0.006
## 579 v42 ~~ v72 0.773 0.004 0.004
## 580 v42 ~~ v73 7.367 0.012 0.012
## 581 v42 ~~ v74 9.021 -0.014 -0.014
## 582 v42 ~~ v75 0.200 -0.002 -0.002
## 583 v43 ~~ v44 178.813 0.166 0.166
## 584 v43 ~~ v45 122.938 0.140 0.140
## 585 v43 ~~ v51 124.269 -0.149 -0.149
## 586 v43 ~~ v52 190.684 -0.175 -0.175
## 587 v43 ~~ v53 23.356 -0.059 -0.059
## 588 v43 ~~ v54 191.425 -0.168 -0.168
## 589 v43 ~~ v55 236.875 -0.189 -0.189
## 590 v43 ~~ v61 18.711 -0.061 -0.061
## 591 v43 ~~ v62 2.798 0.021 0.021
## 592 v43 ~~ v63 172.489 0.153 0.153
## 593 v43 ~~ v64 2.588 0.018 0.018
## 594 v43 ~~ v65 0.253 -0.006 -0.006
## 595 v43 ~~ v71 0.037 -0.001 -0.001
## 596 v43 ~~ v72 2.007 -0.006 -0.006
## 597 v43 ~~ v73 3.873 0.009 0.009
## 598 v43 ~~ v74 3.375 -0.009 -0.009
## 599 v43 ~~ v75 0.107 -0.002 -0.002
## 600 v44 ~~ v45 239.612 0.191 0.191
## 601 v44 ~~ v51 184.864 -0.177 -0.177
## 602 v44 ~~ v52 179.361 -0.165 -0.165
## 603 v44 ~~ v53 184.467 -0.161 -0.161
## 604 v44 ~~ v54 22.153 -0.056 -0.056
## 605 v44 ~~ v55 181.304 -0.161 -0.161
## 606 v44 ~~ v61 9.113 -0.041 -0.041
## 607 v44 ~~ v62 23.753 -0.060 -0.060
## 608 v44 ~~ v63 0.836 0.010 0.010
## 609 v44 ~~ v64 247.323 0.171 0.171
## 610 v44 ~~ v65 5.732 0.027 0.027
## 611 v44 ~~ v71 3.784 -0.009 -0.009
## 612 v44 ~~ v72 0.050 0.001 0.001
## 613 v44 ~~ v73 0.678 0.004 0.004
## 614 v44 ~~ v74 0.856 0.004 0.004
## 615 v44 ~~ v75 1.265 -0.005 -0.005
## 616 v45 ~~ v51 136.480 -0.156 -0.156
## 617 v45 ~~ v52 224.373 -0.189 -0.189
## 618 v45 ~~ v53 198.340 -0.170 -0.170
## 619 v45 ~~ v54 163.247 -0.155 -0.155
## 620 v45 ~~ v55 9.884 -0.039 -0.039
## 621 v45 ~~ v61 2.969 0.024 0.024
## 622 v45 ~~ v62 7.369 -0.034 -0.034
## 623 v45 ~~ v63 10.693 -0.038 -0.038
## 624 v45 ~~ v64 1.244 -0.012 -0.012
## 625 v45 ~~ v65 388.455 0.231 0.231
## 626 v45 ~~ v71 0.020 -0.001 -0.001
## 627 v45 ~~ v72 0.000 0.000 0.000
## 628 v45 ~~ v73 3.582 0.008 0.008
## 629 v45 ~~ v74 1.220 0.005 0.005
## 630 v45 ~~ v75 27.011 -0.025 -0.025
## 631 v51 ~~ v52 257.292 0.214 0.214
## 632 v51 ~~ v53 235.618 0.196 0.196
## 633 v51 ~~ v54 179.076 0.172 0.172
## 634 v51 ~~ v55 130.186 0.147 0.147
## 635 v51 ~~ v61 17.801 0.063 0.063
## 636 v51 ~~ v62 2.903 -0.023 -0.023
## 637 v51 ~~ v63 5.094 -0.028 -0.028
## 638 v51 ~~ v64 20.677 -0.054 -0.054
## 639 v51 ~~ v65 23.719 -0.060 -0.060
## 640 v51 ~~ v71 0.074 0.001 0.001
## 641 v51 ~~ v72 0.005 0.000 0.000
## 642 v51 ~~ v73 0.030 -0.001 -0.001
## 643 v51 ~~ v74 0.034 -0.001 -0.001
## 644 v51 ~~ v75 0.028 0.001 0.001
## 645 v52 ~~ v53 299.460 0.209 0.209
## 646 v52 ~~ v54 261.641 0.196 0.196
## 647 v52 ~~ v55 334.487 0.224 0.224
## 648 v52 ~~ v61 2.834 -0.024 -0.024
## 649 v52 ~~ v62 14.179 0.047 0.047
## 650 v52 ~~ v63 16.369 -0.047 -0.047
## 651 v52 ~~ v64 5.858 -0.027 -0.027
## 652 v52 ~~ v65 14.510 -0.044 -0.044
## 653 v52 ~~ v71 2.705 -0.008 -0.008
## 654 v52 ~~ v72 2.136 0.006 0.006
## 655 v52 ~~ v73 0.002 0.000 0.000
## 656 v52 ~~ v74 1.056 0.005 0.005
## 657 v52 ~~ v75 0.966 0.005 0.005
## 658 v53 ~~ v54 333.250 0.213 0.213
## 659 v53 ~~ v55 275.746 0.195 0.195
## 660 v53 ~~ v61 16.720 -0.055 -0.055
## 661 v53 ~~ v62 0.268 0.006 0.006
## 662 v53 ~~ v63 8.483 0.032 0.032
## 663 v53 ~~ v64 21.354 -0.049 -0.049
## 664 v53 ~~ v65 41.609 -0.072 -0.072
## 665 v53 ~~ v71 0.084 0.001 0.001
## 666 v53 ~~ v72 0.404 -0.003 -0.003
## 667 v53 ~~ v73 0.044 -0.001 -0.001
## 668 v53 ~~ v74 0.196 0.002 0.002
## 669 v53 ~~ v75 2.857 0.008 0.008
## 670 v54 ~~ v55 464.568 0.254 0.254
## 671 v54 ~~ v61 3.061 -0.023 -0.023
## 672 v54 ~~ v62 38.376 -0.075 -0.075
## 673 v54 ~~ v63 22.528 -0.053 -0.053
## 674 v54 ~~ v64 0.299 0.006 0.006
## 675 v54 ~~ v65 26.812 -0.058 -0.058
## 676 v54 ~~ v71 10.778 0.015 0.015
## 677 v54 ~~ v72 5.291 -0.010 -0.010
## 678 v54 ~~ v73 5.926 -0.010 -0.010
## 679 v54 ~~ v74 0.778 0.004 0.004
## 680 v54 ~~ v75 10.024 0.015 0.015
## 681 v55 ~~ v61 0.207 -0.006 -0.006
## 682 v55 ~~ v62 8.635 -0.036 -0.036
## 683 v55 ~~ v63 21.695 -0.052 -0.052
## 684 v55 ~~ v64 46.703 -0.074 -0.074
## 685 v55 ~~ v65 1.435 0.014 0.014
## 686 v55 ~~ v71 6.910 0.012 0.012
## 687 v55 ~~ v72 0.137 -0.002 -0.002
## 688 v55 ~~ v73 9.675 -0.013 -0.013
## 689 v55 ~~ v74 1.064 0.005 0.005
## 690 v55 ~~ v75 2.759 0.008 0.008
## 691 v61 ~~ v62 11.620 0.050 0.050
## 692 v61 ~~ v63 0.724 -0.012 -0.012
## 693 v61 ~~ v64 10.964 -0.044 -0.044
## 694 v61 ~~ v65 0.003 -0.001 -0.001
## 695 v61 ~~ v71 1.303 -0.006 -0.006
## 696 v61 ~~ v72 0.090 0.001 0.001
## 697 v61 ~~ v73 0.971 -0.005 -0.005
## 698 v61 ~~ v74 2.386 0.008 0.008
## 699 v61 ~~ v75 1.458 0.007 0.007
## 700 v62 ~~ v63 15.427 0.050 0.050
## 701 v62 ~~ v64 4.987 -0.028 -0.028
## 702 v62 ~~ v65 3.935 -0.025 -0.025
## 703 v62 ~~ v71 0.019 0.001 0.001
## 704 v62 ~~ v72 0.079 0.001 0.001
## 705 v62 ~~ v73 5.089 -0.010 -0.010
## 706 v62 ~~ v74 1.408 0.006 0.006
## 707 v62 ~~ v75 2.782 0.008 0.008
## 708 v63 ~~ v64 4.166 0.025 0.025
## 709 v63 ~~ v65 18.331 -0.053 -0.053
## 710 v63 ~~ v71 0.041 0.001 0.001
## 711 v63 ~~ v72 0.054 0.001 0.001
## 712 v63 ~~ v73 6.045 0.010 0.010
## 713 v63 ~~ v74 9.174 -0.013 -0.013
## 714 v63 ~~ v75 0.664 0.004 0.004
## 715 v64 ~~ v65 18.681 0.053 0.053
## 716 v64 ~~ v71 3.580 -0.008 -0.008
## 717 v64 ~~ v72 0.990 0.004 0.004
## 718 v64 ~~ v73 0.068 -0.001 -0.001
## 719 v64 ~~ v74 6.293 0.011 0.011
## 720 v64 ~~ v75 1.327 -0.005 -0.005
## 721 v65 ~~ v71 0.005 0.000 0.000
## 722 v65 ~~ v72 0.214 0.002 0.002
## 723 v65 ~~ v73 2.173 0.006 0.006
## 724 v65 ~~ v74 0.589 0.003 0.003
## 725 v65 ~~ v75 13.823 -0.017 -0.017
## 726 v71 ~~ v72 6.988 -0.005 -0.005
## 727 v71 ~~ v73 5.919 -0.005 -0.005
## 728 v71 ~~ v74 0.930 -0.002 -0.002
## 729 v71 ~~ v75 39.307 0.013 0.013
## 730 v72 ~~ v73 373.140 0.036 0.036
## 731 v72 ~~ v74 52.441 -0.014 -0.014
## 732 v72 ~~ v75 103.277 -0.020 -0.020
## 733 v73 ~~ v74 43.591 -0.013 -0.013
## 734 v73 ~~ v75 125.333 -0.022 -0.022
## 735 v74 ~~ v75 253.533 0.033 0.033
## 736 Att ~~ kWh 285.949 -0.195 -0.321
## 737 Sub ~~ kWh 65.878 0.089 0.148
## 738 PBC ~~ kWh 34.677 0.076 0.158
## 739 Int ~~ kWh 45.266 0.084 0.314
## 740 Int ~ kWh 19.983 0.061 0.114
## 741 Int ~ energypoverty_MEAN 38.281 0.053 0.103
## 742 Int ~ initial_survey_age 38.502 0.004 0.007
## 743 Int ~ initial_survey_hhsize 42.618 0.040 0.079
## 744 Int ~ initial_survey_gender 16.311 0.062 0.122
## 745 kWh ~ Att 267.866 -0.739 -0.505
## 746 kWh ~ Sub 16.921 0.230 0.155
## 747 Att ~ kWh 303.329 -0.228 -0.334
## 748 Att ~ energypoverty_MEAN 3.351 -0.023 -0.036
## 749 Att ~ initial_survey_age 22.207 0.004 0.006
## 750 Att ~ initial_survey_hhsize 1.716 0.012 0.019
## 751 Att ~ initial_survey_gender 9.963 -0.073 -0.111
## 752 Sub ~ kWh 52.539 0.090 0.133
## 753 Sub ~ energypoverty_MEAN 1.477 0.014 0.022
## 754 Sub ~ initial_survey_age 0.640 -0.001 -0.001
## 755 Sub ~ initial_survey_hhsize 37.150 0.051 0.079
## 756 Sub ~ initial_survey_gender 2.427 0.032 0.050
## 757 PBC ~ kWh 60.448 0.109 0.203
## 758 PBC ~ energypoverty_MEAN 0.056 0.002 0.004
## 759 PBC ~ initial_survey_age 1.693 -0.001 -0.002
## 760 PBC ~ initial_survey_hhsize 95.867 -0.064 -0.125
## 761 PBC ~ initial_survey_gender 20.878 0.075 0.145
## 762 energypoverty_MEAN ~ Int 3.412 0.072 0.037
## 763 energypoverty_MEAN ~ kWh 5.286 -0.313 -0.300
## 764 energypoverty_MEAN ~ Att 1.601 -0.038 -0.025
## 765 energypoverty_MEAN ~ Sub 0.016 -0.004 -0.002
## 766 energypoverty_MEAN ~ PBC 0.159 -0.015 -0.008
## 767 initial_survey_age ~ Int 15.750 2.119 1.086
## 768 initial_survey_age ~ kWh 17.355 -7.781 -7.471
## 769 initial_survey_age ~ Att 22.147 1.936 1.269
## 770 initial_survey_age ~ Sub 0.251 0.207 0.134
## 771 initial_survey_age ~ PBC 5.244 1.194 0.616
## 772 initial_survey_hhsize ~ Int 0.385 -0.032 -0.016
## 773 initial_survey_hhsize ~ kWh 0.745 -0.156 -0.150
## 774 initial_survey_hhsize ~ Att 8.414 -0.116 -0.076
## 775 initial_survey_hhsize ~ Sub 0.002 -0.002 -0.001
## 776 initial_survey_hhsize ~ PBC 42.485 -0.329 -0.170
## 777 initial_survey_gender ~ Int 22.279 0.100 0.051
## 778 initial_survey_gender ~ kWh 20.411 -0.336 -0.322
## 779 initial_survey_gender ~ Att 0.276 0.009 0.006
## 780 initial_survey_gender ~ Sub 16.892 0.068 0.044
## 781 initial_survey_gender ~ PBC 20.237 0.093 0.048
## sepc.all sepc.nox
## 1 -0.006 -0.006
## 2 -0.058 -0.058
## 3 -0.014 -0.014
## 4 -0.037 -0.037
## 5 -0.022 -0.022
## 6 0.024 0.024
## 7 -0.016 -0.016
## 8 0.057 0.057
## 9 0.001 0.001
## 10 0.098 0.098
## 11 0.241 0.241
## 12 0.168 0.168
## 13 0.254 0.254
## 14 0.258 0.258
## 15 0.267 0.267
## 16 -0.140 -0.140
## 17 -0.260 -0.260
## 18 -0.198 -0.198
## 19 -0.185 -0.185
## 20 -0.249 -0.249
## 21 -0.064 -0.064
## 22 -0.079 -0.079
## 23 -0.045 -0.045
## 24 -0.006 -0.006
## 25 0.072 0.072
## 26 -0.021 -0.021
## 27 0.000 0.000
## 28 0.009 0.009
## 29 0.004 0.004
## 30 -0.019 -0.019
## 31 -0.059 -0.059
## 32 -0.007 -0.007
## 33 0.026 0.026
## 34 -0.015 -0.015
## 35 0.031 0.031
## 36 0.212 0.212
## 37 0.252 0.252
## 38 0.240 0.240
## 39 0.223 0.223
## 40 0.176 0.176
## 41 -0.267 -0.267
## 42 -0.209 -0.209
## 43 -0.199 -0.199
## 44 -0.193 -0.193
## 45 -0.145 -0.145
## 46 0.129 0.129
## 47 0.009 0.009
## 48 0.061 0.061
## 49 -0.065 -0.065
## 50 -0.047 -0.047
## 51 -0.023 -0.023
## 52 0.000 0.000
## 53 0.010 0.010
## 54 0.004 0.004
## 55 0.011 0.011
## 56 -0.080 -0.080
## 57 -0.017 -0.017
## 58 0.024 0.024
## 59 0.003 0.003
## 60 0.034 0.034
## 61 -0.002 -0.002
## 62 -0.030 -0.030
## 63 -0.005 -0.005
## 64 0.011 0.011
## 65 0.004 0.004
## 66 -0.067 -0.067
## 67 -0.011 -0.011
## 68 0.052 0.052
## 69 -0.026 -0.026
## 70 0.046 0.046
## 71 0.047 0.047
## 72 0.012 0.012
## 73 -0.034 -0.034
## 74 -0.027 -0.027
## 75 0.027 0.027
## 76 -0.019 -0.019
## 77 -0.003 -0.003
## 78 0.010 0.010
## 79 0.009 0.009
## 80 0.000 0.000
## 81 -0.106 -0.106
## 82 0.000 0.000
## 83 0.083 0.083
## 84 -0.025 -0.025
## 85 0.009 0.009
## 86 -0.017 -0.017
## 87 -0.142 -0.142
## 88 -0.053 -0.053
## 89 -0.103 -0.103
## 90 -0.150 -0.150
## 91 0.034 0.034
## 92 0.009 0.009
## 93 0.192 0.192
## 94 0.130 0.130
## 95 0.178 0.178
## 96 0.370 0.370
## 97 0.482 0.482
## 98 0.501 0.501
## 99 0.506 0.506
## 100 0.485 0.485
## 101 -0.413 -0.413
## 102 -0.396 -0.396
## 103 -0.391 -0.391
## 104 -0.481 -0.481
## 105 -0.447 -0.447
## 106 -0.027 -0.027
## 107 0.004 0.004
## 108 0.012 0.012
## 109 0.011 0.011
## 110 -0.005 -0.005
## 111 -0.104 -0.104
## 112 -0.045 -0.045
## 113 -0.050 -0.050
## 114 -0.019 -0.019
## 115 -0.048 -0.048
## 116 -0.016 -0.016
## 117 0.012 0.012
## 118 0.001 0.001
## 119 0.098 0.098
## 120 0.051 0.051
## 121 0.016 0.016
## 122 -0.004 -0.004
## 123 -0.050 -0.050
## 124 0.010 0.010
## 125 -0.035 -0.035
## 126 -0.118 -0.118
## 127 -0.077 -0.077
## 128 -0.126 -0.126
## 129 -0.080 -0.080
## 130 -0.134 -0.134
## 131 0.036 0.036
## 132 0.140 0.140
## 133 0.114 0.114
## 134 0.135 0.135
## 135 0.115 0.115
## 136 0.058 0.058
## 137 0.056 0.056
## 138 0.022 0.022
## 139 0.005 0.005
## 140 -0.041 -0.041
## 141 0.078 0.078
## 142 0.117 0.117
## 143 -0.068 -0.068
## 144 -0.048 -0.048
## 145 0.105 0.105
## 146 0.025 0.025
## 147 -0.003 -0.003
## 148 -0.098 -0.098
## 149 -0.089 -0.089
## 150 0.166 0.166
## 151 0.041 0.041
## 152 0.022 0.022
## 153 -0.040 -0.040
## 154 -0.040 -0.040
## 155 0.234 0.234
## 156 0.021 0.021
## 157 -0.025 -0.025
## 158 -0.050 -0.050
## 159 -0.076 -0.076
## 160 0.130 0.130
## 161 0.010 0.010
## 162 -0.032 -0.032
## 163 -0.109 -0.109
## 164 -0.036 -0.036
## 165 0.256 0.256
## 166 0.028 0.028
## 167 -0.058 -0.058
## 168 -0.128 -0.128
## 169 -0.092 -0.092
## 170 -0.012 -0.012
## 171 -0.030 -0.030
## 172 -0.008 -0.008
## 173 0.013 0.013
## 174 -0.010 -0.010
## 175 0.039 0.039
## 176 -0.076 -0.076
## 177 -0.022 -0.022
## 178 0.018 0.018
## 179 0.086 0.086
## 180 0.031 0.031
## 181 -0.029 -0.029
## 182 0.034 0.034
## 183 -0.023 -0.023
## 184 0.030 0.030
## 185 0.034 0.034
## 186 -0.086 -0.086
## 187 -0.111 -0.111
## 188 -0.013 -0.013
## 189 0.209 0.209
## 190 -0.022 -0.022
## 191 -0.098 -0.098
## 192 -0.029 -0.029
## 193 -0.011 -0.011
## 194 0.042 0.042
## 195 0.007 0.007
## 196 -0.086 -0.086
## 197 -0.020 -0.020
## 198 -0.007 -0.007
## 199 0.212 0.212
## 200 -0.013 -0.013
## 201 -0.110 -0.110
## 202 -0.037 -0.037
## 203 -0.056 -0.056
## 204 -0.012 -0.012
## 205 0.020 0.020
## 206 -0.007 -0.007
## 207 0.031 0.031
## 208 -0.045 -0.045
## 209 -0.161 -0.161
## 210 -0.017 -0.017
## 211 -0.017 -0.017
## 212 0.105 0.105
## 213 -0.010 -0.010
## 214 -0.107 -0.107
## 215 0.035 0.035
## 216 -0.037 -0.037
## 217 0.098 0.098
## 218 -0.076 -0.076
## 219 0.008 0.008
## 220 0.031 0.031
## 221 0.014 0.014
## 222 0.314 0.314
## 223 0.030 0.030
## 224 -0.048 -0.048
## 225 -0.110 -0.110
## 226 -0.117 -0.117
## 227 0.009 0.009
## 228 -0.021 -0.021
## 229 -0.126 -0.126
## 230 -0.090 -0.090
## 231 -0.034 -0.034
## 232 0.330 0.330
## 233 -0.055 -0.055
## 234 -0.072 -0.072
## 235 -0.014 -0.014
## 236 -0.058 -0.058
## 237 0.009 0.009
## 238 0.037 0.037
## 239 0.005 0.005
## 240 0.206 0.206
## 241 0.018 0.018
## 242 -0.036 -0.036
## 243 -0.035 -0.035
## 244 0.096 0.096
## 245 -0.043 -0.043
## 246 -0.041 -0.041
## 247 -0.028 -0.028
## 248 -0.120 -0.120
## 249 0.131 0.131
## 250 0.016 0.016
## 251 -0.029 -0.029
## 252 -0.044 -0.044
## 253 -0.053 -0.053
## 254 0.256 0.256
## 255 -0.015 -0.015
## 256 0.000 0.000
## 257 -0.012 -0.012
## 258 -0.087 -0.087
## 259 0.119 0.119
## 260 -0.106 -0.106
## 261 -0.113 -0.113
## 262 -0.125 -0.125
## 263 -0.141 -0.141
## 264 0.316 0.316
## 265 -0.068 -0.068
## 266 0.035 0.035
## 267 0.034 0.034
## 268 -0.004 -0.004
## 269 -0.012 -0.012
## 270 -0.067 -0.067
## 271 -0.094 -0.094
## 272 -0.073 -0.073
## 273 -0.094 -0.094
## 274 -0.015 -0.015
## 275 0.187 0.187
## 276 -0.059 -0.059
## 277 -0.012 -0.012
## 278 -0.009 -0.009
## 279 0.020 0.020
## 280 0.171 0.171
## 281 -0.017 -0.017
## 282 -0.129 -0.129
## 283 -0.077 -0.077
## 284 -0.022 -0.022
## 285 0.295 0.295
## 286 -0.005 -0.005
## 287 -0.088 -0.088
## 288 -0.013 -0.013
## 289 -0.017 -0.017
## 290 0.119 0.119
## 291 -0.034 -0.034
## 292 -0.123 -0.123
## 293 -0.161 -0.161
## 294 -0.073 -0.073
## 295 0.299 0.299
## 296 0.014 0.014
## 297 0.047 0.047
## 298 -0.012 -0.012
## 299 -0.040 -0.040
## 300 -0.048 -0.048
## 301 0.150 0.150
## 302 0.158 0.158
## 303 0.072 0.072
## 304 0.039 0.039
## 305 0.126 0.126
## 306 -0.052 -0.052
## 307 -0.160 -0.160
## 308 -0.135 -0.135
## 309 -0.191 -0.191
## 310 0.218 0.218
## 311 0.071 0.071
## 312 -0.021 -0.021
## 313 -0.051 -0.051
## 314 -0.074 -0.074
## 315 0.134 0.134
## 316 0.003 0.003
## 317 -0.034 -0.034
## 318 -0.101 -0.101
## 319 -0.032 -0.032
## 320 0.148 0.148
## 321 0.027 0.027
## 322 -0.013 -0.013
## 323 -0.006 -0.006
## 324 -0.117 -0.117
## 325 -0.075 -0.075
## 326 -0.007 -0.007
## 327 0.002 0.002
## 328 0.057 0.057
## 329 0.011 0.011
## 330 0.263 0.263
## 331 0.221 0.221
## 332 0.129 0.129
## 333 -0.125 -0.125
## 334 -0.010 -0.010
## 335 -0.210 -0.210
## 336 -0.185 -0.185
## 337 -0.211 -0.211
## 338 -0.083 -0.083
## 339 0.029 0.029
## 340 -0.132 -0.132
## 341 -0.089 -0.089
## 342 -0.090 -0.090
## 343 0.009 0.009
## 344 0.169 0.169
## 345 0.068 0.068
## 346 0.088 0.088
## 347 0.039 0.039
## 348 -0.020 -0.020
## 349 0.084 0.084
## 350 -0.029 -0.029
## 351 -0.102 -0.102
## 352 -0.060 -0.060
## 353 0.001 0.001
## 354 -0.033 -0.033
## 355 0.005 0.005
## 356 0.020 0.020
## 357 0.016 0.016
## 358 0.311 0.311
## 359 0.194 0.194
## 360 -0.175 -0.175
## 361 -0.256 -0.256
## 362 -0.128 -0.128
## 363 -0.254 -0.254
## 364 -0.209 -0.209
## 365 -0.112 -0.112
## 366 -0.032 -0.032
## 367 0.113 0.113
## 368 -0.094 -0.094
## 369 -0.167 -0.167
## 370 -0.033 -0.033
## 371 -0.003 -0.003
## 372 0.228 0.228
## 373 0.076 0.076
## 374 -0.015 -0.015
## 375 -0.029 -0.029
## 376 -0.040 -0.040
## 377 0.129 0.129
## 378 -0.049 -0.049
## 379 -0.084 -0.084
## 380 -0.006 -0.006
## 381 0.005 0.005
## 382 0.011 0.011
## 383 -0.016 -0.016
## 384 0.010 0.010
## 385 0.276 0.276
## 386 -0.245 -0.245
## 387 -0.242 -0.242
## 388 -0.261 -0.261
## 389 -0.032 -0.032
## 390 -0.185 -0.185
## 391 -0.125 -0.125
## 392 -0.125 -0.125
## 393 -0.119 -0.119
## 394 0.055 0.055
## 395 -0.074 -0.074
## 396 -0.072 -0.072
## 397 0.039 0.039
## 398 0.107 0.107
## 399 0.238 0.238
## 400 0.066 0.066
## 401 -0.100 -0.100
## 402 -0.079 -0.079
## 403 -0.019 -0.019
## 404 0.079 0.079
## 405 -0.039 -0.039
## 406 -0.032 -0.032
## 407 0.026 0.026
## 408 0.015 0.015
## 409 -0.029 -0.029
## 410 0.070 0.070
## 411 -0.144 -0.144
## 412 -0.130 -0.130
## 413 -0.223 -0.223
## 414 -0.087 -0.087
## 415 -0.083 -0.083
## 416 -0.108 -0.108
## 417 -0.085 -0.085
## 418 -0.122 -0.122
## 419 -0.151 -0.151
## 420 0.072 0.072
## 421 -0.014 -0.014
## 422 -0.002 -0.002
## 423 0.026 0.026
## 424 0.120 0.120
## 425 0.290 0.290
## 426 0.007 0.007
## 427 -0.088 -0.088
## 428 -0.038 -0.038
## 429 -0.145 -0.145
## 430 0.083 0.083
## 431 0.065 0.065
## 432 -0.040 -0.040
## 433 -0.008 -0.008
## 434 -0.038 -0.038
## 435 0.049 0.049
## 436 0.240 0.240
## 437 0.213 0.213
## 438 0.092 0.092
## 439 0.137 0.137
## 440 0.229 0.229
## 441 0.031 0.031
## 442 -0.028 -0.028
## 443 0.013 0.013
## 444 0.015 0.015
## 445 0.081 0.081
## 446 -0.025 -0.025
## 447 -0.132 -0.132
## 448 -0.103 -0.103
## 449 -0.087 -0.087
## 450 0.200 0.200
## 451 0.059 0.059
## 452 -0.033 -0.033
## 453 -0.024 -0.024
## 454 -0.048 -0.048
## 455 -0.010 -0.010
## 456 0.013 0.013
## 457 -0.012 -0.012
## 458 0.030 0.030
## 459 -0.013 -0.013
## 460 0.266 0.266
## 461 0.154 0.154
## 462 0.166 0.166
## 463 0.072 0.072
## 464 0.151 0.151
## 465 0.006 0.006
## 466 -0.007 -0.007
## 467 -0.021 -0.021
## 468 0.000 0.000
## 469 0.039 0.039
## 470 -0.116 -0.116
## 471 -0.095 -0.095
## 472 0.001 0.001
## 473 0.044 0.044
## 474 0.142 0.142
## 475 -0.007 -0.007
## 476 -0.043 -0.043
## 477 -0.073 -0.073
## 478 0.022 0.022
## 479 -0.011 -0.011
## 480 -0.012 -0.012
## 481 -0.053 -0.053
## 482 0.051 0.051
## 483 0.219 0.219
## 484 0.298 0.298
## 485 0.101 0.101
## 486 0.063 0.063
## 487 0.183 0.183
## 488 0.046 0.046
## 489 0.069 0.069
## 490 -0.001 -0.001
## 491 -0.086 -0.086
## 492 -0.012 -0.012
## 493 -0.172 -0.172
## 494 -0.164 -0.164
## 495 0.023 0.023
## 496 0.049 0.049
## 497 0.110 0.110
## 498 0.024 0.024
## 499 -0.013 -0.013
## 500 -0.032 -0.032
## 501 0.030 0.030
## 502 0.002 0.002
## 503 -0.023 -0.023
## 504 0.000 0.000
## 505 0.246 0.246
## 506 -0.047 -0.047
## 507 0.018 0.018
## 508 0.091 0.091
## 509 0.232 0.232
## 510 0.111 0.111
## 511 -0.108 -0.108
## 512 -0.108 -0.108
## 513 -0.140 -0.140
## 514 -0.094 -0.094
## 515 -0.074 -0.074
## 516 -0.069 -0.069
## 517 -0.031 -0.031
## 518 -0.026 -0.026
## 519 0.180 0.180
## 520 0.096 0.096
## 521 0.036 0.036
## 522 -0.029 -0.029
## 523 0.031 0.031
## 524 0.018 0.018
## 525 -0.047 -0.047
## 526 0.087 0.087
## 527 0.039 0.039
## 528 0.103 0.103
## 529 0.097 0.097
## 530 0.164 0.164
## 531 -0.087 -0.087
## 532 -0.118 -0.118
## 533 -0.171 -0.171
## 534 -0.088 -0.088
## 535 -0.039 -0.039
## 536 0.017 0.017
## 537 -0.053 -0.053
## 538 -0.014 -0.014
## 539 0.022 0.022
## 540 0.167 0.167
## 541 -0.016 -0.016
## 542 0.043 0.043
## 543 -0.028 -0.028
## 544 0.016 0.016
## 545 -0.032 -0.032
## 546 0.270 0.270
## 547 0.139 0.139
## 548 0.116 0.116
## 549 0.125 0.125
## 550 0.051 0.051
## 551 -0.233 -0.233
## 552 -0.213 -0.213
## 553 -0.242 -0.242
## 554 -0.267 -0.267
## 555 0.348 0.348
## 556 -0.010 -0.010
## 557 -0.021 -0.021
## 558 -0.002 -0.002
## 559 -0.022 -0.022
## 560 -0.023 -0.023
## 561 0.019 0.019
## 562 -0.031 -0.031
## 563 0.012 0.012
## 564 -0.013 -0.013
## 565 0.262 0.262
## 566 0.174 0.174
## 567 0.200 0.200
## 568 -0.202 -0.202
## 569 -0.083 -0.083
## 570 -0.291 -0.291
## 571 -0.340 -0.340
## 572 -0.290 -0.290
## 573 0.017 0.017
## 574 0.340 0.340
## 575 0.018 0.018
## 576 0.021 0.021
## 577 -0.003 -0.003
## 578 -0.030 -0.030
## 579 0.022 0.022
## 580 0.066 0.066
## 581 -0.072 -0.072
## 582 -0.011 -0.011
## 583 0.308 0.308
## 584 0.253 0.253
## 585 -0.250 -0.250
## 586 -0.316 -0.316
## 587 -0.113 -0.113
## 588 -0.321 -0.321
## 589 -0.356 -0.356
## 590 -0.095 -0.095
## 591 0.038 0.038
## 592 0.307 0.307
## 593 0.039 0.039
## 594 -0.012 -0.012
## 595 -0.005 -0.005
## 596 -0.035 -0.035
## 597 0.048 0.048
## 598 -0.044 -0.044
## 599 -0.008 -0.008
## 600 0.357 0.357
## 601 -0.308 -0.308
## 602 -0.309 -0.309
## 603 -0.320 -0.320
## 604 -0.110 -0.110
## 605 -0.315 -0.315
## 606 -0.067 -0.067
## 607 -0.111 -0.111
## 608 0.022 0.022
## 609 0.379 0.379
## 610 0.056 0.056
## 611 -0.046 -0.046
## 612 0.006 0.006
## 613 0.020 0.020
## 614 0.022 0.022
## 615 -0.027 -0.027
## 616 -0.262 -0.262
## 617 -0.343 -0.343
## 618 -0.328 -0.328
## 619 -0.297 -0.297
## 620 -0.073 -0.073
## 621 0.038 0.038
## 622 -0.061 -0.061
## 623 -0.076 -0.076
## 624 -0.027 -0.027
## 625 0.460 0.460
## 626 -0.003 -0.003
## 627 -0.001 -0.001
## 628 0.046 0.046
## 629 0.026 0.026
## 630 -0.122 -0.122
## 631 0.360 0.360
## 632 0.351 0.351
## 633 0.305 0.305
## 634 0.259 0.259
## 635 0.092 0.092
## 636 -0.038 -0.038
## 637 -0.052 -0.052
## 638 -0.108 -0.108
## 639 -0.112 -0.112
## 640 0.006 0.006
## 641 0.002 0.002
## 642 -0.004 -0.004
## 643 -0.004 -0.004
## 644 0.004 0.004
## 645 0.404 0.404
## 646 0.376 0.376
## 647 0.424 0.424
## 648 -0.037 -0.037
## 649 0.085 0.085
## 650 -0.095 -0.095
## 651 -0.058 -0.058
## 652 -0.089 -0.089
## 653 -0.039 -0.039
## 654 0.036 0.036
## 655 0.001 0.001
## 656 0.025 0.025
## 657 0.023 0.023
## 658 0.433 0.433
## 659 0.393 0.393
## 660 -0.091 -0.091
## 661 0.012 0.012
## 662 0.069 0.069
## 663 -0.112 -0.112
## 664 -0.152 -0.152
## 665 0.007 0.007
## 666 -0.016 -0.016
## 667 -0.005 -0.005
## 668 0.011 0.011
## 669 0.040 0.040
## 670 0.508 0.508
## 671 -0.039 -0.039
## 672 -0.142 -0.142
## 673 -0.112 -0.112
## 674 0.013 0.013
## 675 -0.122 -0.122
## 676 0.078 0.078
## 677 -0.057 -0.057
## 678 -0.060 -0.060
## 679 0.021 0.021
## 680 0.075 0.075
## 681 -0.010 -0.010
## 682 -0.067 -0.067
## 683 -0.110 -0.110
## 684 -0.165 -0.165
## 685 0.028 0.028
## 686 0.063 0.063
## 687 -0.009 -0.009
## 688 -0.076 -0.076
## 689 0.025 0.025
## 690 0.039 0.039
## 691 0.078 0.078
## 692 -0.020 -0.020
## 693 -0.082 -0.082
## 694 -0.001 -0.001
## 695 -0.026 -0.026
## 696 0.007 0.007
## 697 -0.024 -0.024
## 698 0.036 0.036
## 699 0.028 0.028
## 700 0.100 0.100
## 701 -0.059 -0.059
## 702 -0.050 -0.050
## 703 0.003 0.003
## 704 0.007 0.007
## 705 -0.056 -0.056
## 706 0.029 0.029
## 707 0.040 0.040
## 708 0.059 0.059
## 709 -0.117 -0.117
## 710 0.005 0.005
## 711 0.006 0.006
## 712 0.063 0.063
## 713 -0.075 -0.075
## 714 0.020 0.020
## 715 0.125 0.125
## 716 -0.048 -0.048
## 717 0.026 0.026
## 718 -0.007 -0.007
## 719 0.064 0.064
## 720 -0.029 -0.029
## 721 -0.002 -0.002
## 722 0.012 0.012
## 723 0.037 0.037
## 724 0.019 0.019
## 725 -0.091 -0.091
## 726 -0.078 -0.078
## 727 -0.071 -0.071
## 728 -0.027 -0.027
## 729 0.170 0.170
## 730 0.619 0.619
## 731 -0.220 -0.220
## 732 -0.298 -0.298
## 733 -0.198 -0.198
## 734 -0.324 -0.324
## 735 0.442 0.442
## 736 -0.321 -0.321
## 737 0.148 0.148
## 738 0.158 0.158
## 739 0.314 0.314
## 740 0.114 0.114
## 741 0.094 0.103
## 742 0.095 0.007
## 743 0.100 0.079
## 744 0.062 0.122
## 745 -0.505 -0.505
## 746 0.155 0.155
## 747 -0.334 -0.334
## 748 -0.033 -0.036
## 749 0.084 0.006
## 750 0.023 0.019
## 751 -0.056 -0.111
## 752 0.133 0.133
## 753 0.020 0.022
## 754 -0.013 -0.001
## 755 0.099 0.079
## 756 0.025 0.050
## 757 0.203 0.203
## 758 0.004 0.004
## 759 -0.021 -0.002
## 760 -0.157 -0.125
## 761 0.073 0.145
## 762 0.040 0.040
## 763 -0.327 -0.327
## 764 -0.027 -0.027
## 765 -0.003 -0.003
## 766 -0.008 -0.008
## 767 0.084 0.084
## 768 -0.575 -0.575
## 769 0.098 0.098
## 770 0.010 0.010
## 771 0.047 0.047
## 772 -0.013 -0.013
## 773 -0.119 -0.119
## 774 -0.060 -0.060
## 775 -0.001 -0.001
## 776 -0.135 -0.135
## 777 0.101 0.101
## 778 -0.636 -0.636
## 779 0.011 0.011
## 780 0.086 0.086
## 781 0.095 0.095
#Modification Indices
mod_ind <- modificationindices(fit)
#Spotting the top 10
head_mod <- head(mod_ind[order(mod_ind$mi, decreasing=TRUE), ], 10)
head_mod
## lhs op rhs mi epc sepc.lv sepc.all sepc.nox
## 767 v54 ~~ v55 464.568 0.254 0.254 0.508 0.508
## 722 v45 ~~ v65 388.455 0.231 0.231 0.460 0.460
## 827 v72 ~~ v73 373.140 0.036 0.036 0.619 0.619
## 744 v52 ~~ v55 334.487 0.224 0.224 0.424 0.424
## 755 v53 ~~ v54 333.250 0.213 0.213 0.433 0.433
## 848 Att ~ kWh 303.329 -0.228 -0.334 -0.334 -0.334
## 742 v52 ~~ v53 299.460 0.209 0.209 0.404 0.404
## 196 Int =~ v44 286.552 0.988 0.507 0.506 0.506
## 834 Att ~~ kWh 285.949 -0.195 -0.321 -0.321 -0.321
## 756 v53 ~~ v55 275.746 0.195 0.195 0.393 0.393
#Making a graph
png("initial_SEM_diagram.png", width = 800, height = 600)
semPlot::semPaths(fit, whatLabels = "std", layout = "tree", rotation = 2)
dev.off()
## png
## 2
Initial Longitudinal MODEL
original_longitudinal_model <- '#Defining latent variables#
#Defining indicators for week 1
f1_w1 =~ v11
f2_w1 =~ v21 + v31
f3_w1 =~ v41 + v51
f4_w1 =~ v61
f5_w1 =~ v71
#Defining indicators for week 2
f1_w2 =~ v12
f2_w2 =~ v22 + v32
f3_w2 =~ v42 + v52
f4_w2 =~ v62
f5_w2 =~ v72
#Defining indicators for week 3
f1_w3 =~ v13
f2_w3 =~ v23 + v33
f3_w3 =~ v43 + v53
f4_w3 =~ v63
f5_w3 =~ v73
#Defining indicators for week 4
f1_w4 =~ v14
f2_w4 =~ v24 + v34
f3_w4 =~ v44 + v54
f4_w4 =~ v64
f5_w4 =~ v74
#Defining indicators for week 5
f1_w5 =~ v15
f2_w5 =~ v25 + v35
f3_w5 =~ v45 + v55
f4_w5 =~ v65
f5_w5 =~ v75
#Specifying temporal dependencies & Structural relationships#
#Autoregressive effects for attitude
f1_w2 ~ f1_w1
f1_w3 ~ f1_w2
f1_w4 ~ f1_w3
f1_w5 ~ f1_w4
#Autoregressive effects for subjective norms
f2_w2 ~ f2_w1
f2_w3 ~ f2_w2
f2_w4 ~ f2_w3
f2_w5 ~ f2_w4
#Autoregressive effects for perceived behavioral control
f3_w2 ~ f3_w1
f3_w3 ~ f3_w2
f3_w4 ~ f3_w3
f3_w5 ~ f3_w4
#Autoregressive effects for intention
f4_w1 ~ f1_w1 + f2_w1 + f3_w1
f4_w2 ~ f1_w2 + f2_w2 + f3_w2 + alpha * f4_w1
f4_w3 ~ f1_w3 + f2_w3 + f3_w3 + beta* f4_w2
f4_w4 ~ f1_w4 + f2_w4 + f3_w4 + gamma * f4_w3
f4_w5 ~ f1_w5 + f2_w5 + f3_w5 + delta * f4_w4
#Autoregressive effects for actual consumption
f5_w1 ~ f4_w1 + f3_w1 + energypoverty_MEAN + initial_survey_age + initial_survey_hhsize + initial_survey_gender
f5_w2 ~ f4_w2 + epsilon * f5_w1 + f3_w2
f5_w3 ~ f4_w3 + zeta * f5_w2 + f3_w3
f5_w4 ~ f4_w4 + eta * f5_w3 + f3_w4
f5_w5 ~ f4_w5 + theta * f5_w4 + f3_w5
#Covariance between latent variables at the same time point
#Week 1
f1_w1 ~~ f2_w1
f1_w1 ~~ f3_w1
f2_w1 ~~ f3_w1
#Week 2
f1_w2 ~~ f2_w2
f1_w2 ~~ f3_w2
f2_w2 ~~ f3_w2
#Week 3
f1_w3 ~~ f2_w3
f1_w3 ~~ f3_w3
f2_w3 ~~ f3_w3
#Week 4
f1_w4 ~~ f2_w4
f1_w4 ~~ f3_w4
f2_w4 ~~ f3_w4
#Week5
f1_w5 ~~ f2_w5
f1_w5 ~~ f3_w5
f2_w5 ~~ f3_w5
'
#ANALYSIS#
#Fitting the PPLGM
original_longitudinal_model_fit <- sem(original_longitudinal_model, data = noneg_data_standardized, control = list(maxit = 10000), estimator = "MLR")
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
summary(original_longitudinal_model_fit, fit.measures=TRUE, standardized=TRUE, rsquare=TRUE)
## lavaan 0.6.16 ended normally after 78 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 119
##
## Used Total
## Number of observations 2352 2363
##
## Model Test User Model:
## Standard Scaled
## Test Statistic 13047.475 7963.642
## Degrees of freedom 651 651
## P-value (Chi-square) 0.000 0.000
## Scaling correction factor 1.638
## Yuan-Bentler correction (Mplus variant)
##
## Model Test Baseline Model:
##
## Test statistic 66738.515 34860.589
## Degrees of freedom 735 735
## P-value 0.000 0.000
## Scaling correction factor 1.914
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.812 0.786
## Tucker-Lewis Index (TLI) 0.788 0.758
##
## Robust Comparative Fit Index (CFI) 0.817
## Robust Tucker-Lewis Index (TLI) 0.793
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -88377.250 -88377.250
## Scaling correction factor 3.414
## for the MLR correction
## Loglikelihood unrestricted model (H1) -81853.513 -81853.513
## Scaling correction factor 1.913
## for the MLR correction
##
## Akaike (AIC) 176992.501 176992.501
## Bayesian (BIC) 177678.300 177678.300
## Sample-size adjusted Bayesian (SABIC) 177300.213 177300.213
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.090 0.069
## 90 Percent confidence interval - lower 0.089 0.068
## 90 Percent confidence interval - upper 0.091 0.070
## P-value H_0: RMSEA <= 0.050 0.000 0.000
## P-value H_0: RMSEA >= 0.080 1.000 0.000
##
## Robust RMSEA 0.088
## 90 Percent confidence interval - lower 0.087
## 90 Percent confidence interval - upper 0.090
## P-value H_0: Robust RMSEA <= 0.050 0.000
## P-value H_0: Robust RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.101 0.101
##
## Parameter Estimates:
##
## Standard errors Sandwich
## Information bread Observed
## Observed information based on Hessian
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w1 =~
## v11 1.000 1.000 1.000
## f2_w1 =~
## v21 1.000 0.736 0.735
## v31 0.854 0.043 19.658 0.000 0.628 0.629
## f3_w1 =~
## v41 1.000 0.748 0.748
## v51 0.686 0.050 13.717 0.000 0.513 0.513
## f4_w1 =~
## v61 1.000 1.001 1.000
## f5_w1 =~
## v71 1.000 1.001 1.000
## f1_w2 =~
## v12 1.000 0.831 1.000
## f2_w2 =~
## v22 1.000 0.735 0.744
## v32 0.885 0.035 25.005 0.000 0.651 0.657
## f3_w2 =~
## v42 1.000 0.783 0.791
## v52 0.714 0.046 15.618 0.000 0.559 0.561
## f4_w2 =~
## v62 1.000 0.992 1.000
## f5_w2 =~
## v72 1.000 1.001 1.000
## f1_w3 =~
## v13 1.000 0.858 1.000
## f2_w3 =~
## v23 1.000 0.731 0.747
## v33 0.936 0.039 24.271 0.000 0.684 0.698
## f3_w3 =~
## v43 1.000 0.751 0.770
## v53 0.766 0.044 17.302 0.000 0.575 0.583
## f4_w3 =~
## v63 1.000 0.974 1.000
## f5_w3 =~
## v73 1.000 1.001 1.000
## f1_w4 =~
## v14 1.000 0.874 1.000
## f2_w4 =~
## v24 1.000 0.737 0.760
## v34 0.930 0.033 28.488 0.000 0.686 0.703
## f3_w4 =~
## v44 1.000 0.769 0.794
## v54 0.734 0.043 17.112 0.000 0.564 0.574
## f4_w4 =~
## v64 1.000 0.971 1.000
## f5_w4 =~
## v74 1.000 1.001 1.000
## f1_w5 =~
## v15 1.000 0.856 1.000
## f2_w5 =~
## v25 1.000 0.653 0.677
## v35 0.955 0.036 26.265 0.000 0.624 0.645
## f3_w5 =~
## v45 1.000 0.783 0.815
## v55 0.706 0.057 12.463 0.000 0.552 0.563
## f4_w5 =~
## v65 1.000 0.966 1.000
## f5_w5 =~
## v75 1.000 0.998 1.000
##
## Regressions:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w2 ~
## f1_w1 0.457 0.018 25.205 0.000 0.550 0.550
## f1_w3 ~
## f1_w2 0.658 0.021 31.681 0.000 0.637 0.637
## f1_w4 ~
## f1_w3 0.683 0.019 35.646 0.000 0.671 0.671
## f1_w5 ~
## f1_w4 0.707 0.017 40.421 0.000 0.721 0.721
## f2_w2 ~
## f2_w1 0.834 0.035 23.837 0.000 0.835 0.835
## f2_w3 ~
## f2_w2 0.944 0.028 33.534 0.000 0.950 0.950
## f2_w4 ~
## f2_w3 0.963 0.027 36.349 0.000 0.955 0.955
## f2_w5 ~
## f2_w4 0.923 0.025 36.262 0.000 1.042 1.042
## f3_w2 ~
## f3_w1 0.783 0.039 20.062 0.000 0.748 0.748
## f3_w3 ~
## f3_w2 0.844 0.031 27.286 0.000 0.880 0.880
## f3_w4 ~
## f3_w3 0.917 0.028 32.989 0.000 0.895 0.895
## f3_w5 ~
## f3_w4 0.864 0.027 31.834 0.000 0.848 0.848
## f4_w1 ~
## f1_w1 0.022 0.031 0.714 0.476 0.022 0.022
## f2_w1 0.260 0.057 4.591 0.000 0.191 0.191
## f3_w1 0.597 0.066 9.005 0.000 0.446 0.446
## f4_w2 ~
## f1_w2 0.081 0.038 2.130 0.033 0.067 0.067
## f2_w2 0.172 0.053 3.237 0.001 0.128 0.128
## f3_w2 0.559 0.058 9.666 0.000 0.441 0.441
## f4_w1 (alph) 0.121 0.026 4.711 0.000 0.122 0.122
## f4_w3 ~
## f1_w3 0.161 0.032 4.992 0.000 0.142 0.142
## f2_w3 0.252 0.047 5.335 0.000 0.189 0.189
## f3_w3 0.397 0.061 6.523 0.000 0.306 0.306
## f4_w2 (beta) 0.217 0.029 7.418 0.000 0.221 0.221
## f4_w4 ~
## f1_w4 0.119 0.034 3.517 0.000 0.107 0.107
## f2_w4 0.118 0.046 2.554 0.011 0.090 0.090
## f3_w4 0.474 0.063 7.502 0.000 0.375 0.375
## f4_w3 (gamm) 0.284 0.030 9.562 0.000 0.285 0.285
## f4_w5 ~
## f1_w5 0.073 0.031 2.336 0.019 0.065 0.065
## f2_w5 0.064 0.059 1.082 0.279 0.043 0.043
## f3_w5 0.551 0.069 8.031 0.000 0.446 0.446
## f4_w4 (delt) 0.289 0.032 9.057 0.000 0.291 0.291
## f5_w1 ~
## f4_w1 0.106 0.033 3.209 0.001 0.105 0.105
## f3_w1 -0.344 0.049 -7.024 0.000 -0.257 -0.257
## en_MEAN 0.069 0.021 3.223 0.001 0.069 0.063
## intl_s_ -0.009 0.002 -5.085 0.000 -0.009 -0.116
## intl_s_ -0.111 0.016 -6.752 0.000 -0.111 -0.140
## intl_s_ -0.009 0.040 -0.210 0.834 -0.008 -0.004
## f5_w2 ~
## f4_w2 0.007 0.011 0.614 0.539 0.007 0.007
## f5_w1 (epsl) 0.931 0.035 26.402 0.000 0.932 0.932
## f3_w2 0.005 0.018 0.269 0.788 0.004 0.004
## f5_w3 ~
## f4_w3 0.002 0.011 0.210 0.834 0.002 0.002
## f5_w2 (zeta) 0.958 0.017 54.936 0.000 0.957 0.957
## f3_w3 0.008 0.014 0.528 0.598 0.006 0.006
## f5_w4 ~
## f4_w4 0.016 0.011 1.471 0.141 0.016 0.016
## f5_w3 (eta) 0.928 0.036 26.061 0.000 0.928 0.928
## f3_w4 -0.030 0.020 -1.535 0.125 -0.023 -0.023
## f5_w5 ~
## f4_w5 -0.028 0.012 -2.242 0.025 -0.027 -0.027
## f5_w4 (thet) 0.939 0.026 36.740 0.000 0.942 0.942
## f3_w5 -0.004 0.017 -0.243 0.808 -0.003 -0.003
##
## Covariances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w1 ~~
## f2_w1 0.286 0.022 12.922 0.000 0.389 0.389
## f3_w1 0.384 0.027 14.107 0.000 0.513 0.513
## f2_w1 ~~
## f3_w1 0.377 0.025 15.108 0.000 0.685 0.685
## .f1_w2 ~~
## .f2_w2 0.106 0.012 8.909 0.000 0.380 0.380
## .f3_w2 0.176 0.016 10.802 0.000 0.489 0.489
## .f2_w2 ~~
## .f3_w2 0.148 0.016 9.170 0.000 0.705 0.705
## .f1_w3 ~~
## .f2_w3 0.071 0.010 7.256 0.000 0.471 0.471
## .f3_w3 0.148 0.015 9.675 0.000 0.626 0.626
## .f2_w3 ~~
## .f3_w3 0.077 0.011 7.110 0.000 0.947 0.947
## .f1_w4 ~~
## .f2_w4 0.075 0.010 7.548 0.000 0.528 0.528
## .f3_w4 0.138 0.015 9.143 0.000 0.619 0.619
## .f2_w4 ~~
## .f3_w4 0.061 0.011 5.747 0.000 0.811 0.811
## .f1_w5 ~~
## .f2_w5 0.080 0.012 6.667 0.000 0.707 0.707
## .f3_w5 0.140 0.016 8.563 0.000 0.570 0.570
## .f2_w5 ~~
## .f3_w5 0.085 0.016 5.359 0.000 1.069 1.069
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .v11 0.000 0.000 0.000
## .v21 0.461 0.029 16.135 0.000 0.461 0.460
## .v31 0.604 0.029 20.902 0.000 0.604 0.605
## .v41 0.441 0.028 15.846 0.000 0.441 0.441
## .v51 0.737 0.034 21.767 0.000 0.737 0.737
## .v61 0.000 0.000 0.000
## .v71 0.000 0.000 0.000
## .v12 0.000 0.000 0.000
## .v22 0.436 0.025 17.618 0.000 0.436 0.446
## .v32 0.557 0.025 22.007 0.000 0.557 0.568
## .v42 0.366 0.024 15.067 0.000 0.366 0.374
## .v52 0.679 0.035 19.171 0.000 0.679 0.685
## .v62 0.000 0.000 0.000
## .v72 0.000 0.000 0.000
## .v13 0.000 0.000 0.000
## .v23 0.423 0.023 18.203 0.000 0.423 0.442
## .v33 0.492 0.026 19.082 0.000 0.492 0.513
## .v43 0.386 0.025 15.670 0.000 0.386 0.407
## .v53 0.642 0.034 18.650 0.000 0.642 0.660
## .v63 0.000 0.000 0.000
## .v73 0.000 0.000 0.000
## .v14 0.000 0.000 0.000
## .v24 0.398 0.022 18.150 0.000 0.398 0.423
## .v34 0.481 0.024 20.043 0.000 0.481 0.506
## .v44 0.347 0.024 14.530 0.000 0.347 0.370
## .v54 0.648 0.034 18.839 0.000 0.648 0.671
## .v64 0.000 0.000 0.000
## .v74 0.000 0.000 0.000
## .v15 0.000 0.000 0.000
## .v25 0.502 0.026 19.616 0.000 0.502 0.541
## .v35 0.545 0.025 21.449 0.000 0.545 0.584
## .v45 0.310 0.043 7.264 0.000 0.310 0.336
## .v55 0.658 0.039 16.877 0.000 0.658 0.683
## .v65 0.000 0.000 0.000
## .v75 0.000 0.000 0.000
## f1_w1 1.000 0.029 34.965 0.000 1.000 1.000
## f2_w1 0.541 0.033 16.211 0.000 1.000 1.000
## f3_w1 0.559 0.035 16.173 0.000 1.000 1.000
## .f4_w1 0.635 0.038 16.558 0.000 0.634 0.634
## .f5_w1 0.913 0.058 15.633 0.000 0.911 0.911
## .f1_w2 0.481 0.020 23.898 0.000 0.697 0.697
## .f2_w2 0.163 0.020 8.139 0.000 0.302 0.302
## .f3_w2 0.270 0.029 9.232 0.000 0.440 0.440
## .f4_w2 0.584 0.035 16.845 0.000 0.594 0.594
## .f5_w2 0.134 0.029 4.654 0.000 0.134 0.134
## .f1_w3 0.438 0.020 21.493 0.000 0.594 0.594
## .f2_w3 0.052 0.013 4.191 0.000 0.098 0.098
## .f3_w3 0.127 0.022 5.689 0.000 0.225 0.225
## .f4_w3 0.504 0.026 19.068 0.000 0.532 0.532
## .f5_w3 0.085 0.011 7.896 0.000 0.085 0.085
## .f1_w4 0.420 0.020 21.430 0.000 0.550 0.550
## .f2_w4 0.048 0.012 3.870 0.000 0.089 0.089
## .f3_w4 0.117 0.022 5.344 0.000 0.199 0.199
## .f4_w4 0.470 0.028 17.034 0.000 0.498 0.498
## .f5_w4 0.136 0.023 6.053 0.000 0.136 0.136
## .f1_w5 0.352 0.017 20.157 0.000 0.480 0.480
## .f2_w5 -0.037 0.020 -1.805 0.071 -0.086 -0.086
## .f3_w5 0.172 0.044 3.924 0.000 0.281 0.281
## .f4_w5 0.447 0.033 13.388 0.000 0.478 0.478
## .f5_w5 0.108 0.019 5.644 0.000 0.109 0.109
##
## R-Square:
## Estimate
## v11 1.000
## v21 0.540
## v31 0.395
## v41 0.559
## v51 0.263
## v61 1.000
## v71 1.000
## v12 1.000
## v22 0.554
## v32 0.432
## v42 0.626
## v52 0.315
## v62 1.000
## v72 1.000
## v13 1.000
## v23 0.558
## v33 0.487
## v43 0.593
## v53 0.340
## v63 1.000
## v73 1.000
## v14 1.000
## v24 0.577
## v34 0.494
## v44 0.630
## v54 0.329
## v64 1.000
## v74 1.000
## v15 1.000
## v25 0.459
## v35 0.416
## v45 0.664
## v55 0.317
## v65 1.000
## v75 1.000
## f4_w1 0.366
## f5_w1 0.089
## f1_w2 0.303
## f2_w2 0.698
## f3_w2 0.560
## f4_w2 0.406
## f5_w2 0.866
## f1_w3 0.406
## f2_w3 0.902
## f3_w3 0.775
## f4_w3 0.468
## f5_w3 0.915
## f1_w4 0.450
## f2_w4 0.911
## f3_w4 0.801
## f4_w4 0.502
## f5_w4 0.864
## f1_w5 0.520
## f2_w5 NA
## f3_w5 0.719
## f4_w5 0.522
## f5_w5 0.891
mod_ind_pplgm <- modificationindices(original_longitudinal_model_fit)
## Warning in sqrt(var.lhs.value * var.rhs.value): NaNs produced
## Warning in lav_start_check_cov(lavpartable = lavpartable, start = START): lavaan WARNING: starting values imply NaN for a correlation value;
## variables involved are: f1_w5 f2_w5
# spotting the top 10
head_mod_pplgm <- head(mod_ind_pplgm[order(mod_ind_pplgm$mi, decreasing=TRUE), ], 10)
head_mod_pplgm
## lhs op rhs mi epc sepc.lv sepc.all sepc.nox
## 1566 v54 ~~ v55 883.003 0.435 0.435 0.666 0.666
## 1475 v53 ~~ v54 708.106 0.382 0.382 0.593 0.593
## 1349 v52 ~~ v55 706.124 0.396 0.396 0.592 0.592
## 1482 v53 ~~ v55 690.958 0.381 0.381 0.587 0.587
## 1335 v52 ~~ v53 641.690 0.374 0.374 0.566 0.566
## 128 v15 ~~ v15 638.855 -0.759 0.000 0.000 0.000
## 1342 v52 ~~ v54 630.056 0.369 0.369 0.556 0.556
## 1153 v51 ~~ v53 586.494 0.370 0.370 0.538 0.538
## 1146 v51 ~~ v52 559.556 0.373 0.373 0.528 0.528
## 1160 v51 ~~ v54 533.185 0.353 0.353 0.511 0.511
Updated Longitudinal MODEL
updated_longitudianl_model <- '#Defining latent variables#
#Defining indicators for week 1
f1_w1 =~ v11
f2_w1 =~ v21 + v31
f3_w1 =~ v41 + v51
f4_w1 =~ v61
f5_w1 =~ v71
#Defining indicators for week 2
f1_w2 =~ v12
f2_w2 =~ v22 + v32
f3_w2 =~ v42 + v52
f4_w2 =~ v62
f5_w2 =~ v72
#Defining indicators for week 3
f1_w3 =~ v13
f2_w3 =~ v23 + v33
f3_w3 =~ v43 + v53
f4_w3 =~ v63
f5_w3 =~ v73
#Defining indicators for week 4
f1_w4 =~ v14
f2_w4 =~ v24 + v34
f3_w4 =~ v44 + v54
f4_w4 =~ v64
f5_w4 =~ v74
#Defining indicators for week 5
f1_w5 =~ v15
f2_w5 =~ v25 + v35
f3_w5 =~ v45 + v55
f4_w5 =~ v65
f5_w5 =~ v75
#look at whats going on in week five, especially for f2
#Specifying temporal dependencies & Structural relationships#
#Autoregressive effects for attitude
f1_w2 ~ f1_w1
f1_w3 ~ f1_w2
f1_w4 ~ f1_w3
f1_w5 ~ f1_w4
#Autoregressive effects for subjective norms
f2_w2 ~ f2_w1
f2_w3 ~ f2_w2
f2_w4 ~ f2_w3
f2_w5 ~ f2_w4
#Autoregressive effects for perceived behavioral control
f3_w2 ~ f3_w1
f3_w3 ~ f3_w2
f3_w4 ~ f3_w3
f3_w5 ~ f3_w4
#Autoregressive effects for intention
f4_w1 ~ f1_w1 + f2_w1 + f3_w1
f4_w2 ~ f1_w2 + f2_w2 + f3_w2 + alpha * f4_w1
f4_w3 ~ f1_w3 + f2_w3 + f3_w3 + beta* f4_w2
f4_w4 ~ f1_w4 + f2_w4 + f3_w4 + gamma * f4_w3
f4_w5 ~ f1_w5 + f2_w5 + f3_w5 + delta * f4_w4
#Autoregressive effects for actual consumption
f5_w1 ~ f4_w1 + energypoverty_MEAN + initial_survey_age + initial_survey_hhsize + initial_survey_gender + f3_w1
f5_w2 ~ f4_w2 + epsilon * f5_w1 + f3_w2
f5_w3 ~ f4_w3 + zeta * f5_w2 + f3_w3
f5_w4 ~ f4_w4 + eta * f5_w3 + f3_w4
f5_w5 ~ f4_w5 + theta * f5_w4 + f3_w5
#Covariance between latent variables at the same time point
#Week 1
f1_w1 ~~ f2_w1
f1_w1 ~~ f3_w1
f2_w1 ~~ f3_w1
#Week 2
f1_w2 ~~ f2_w2
f1_w2 ~~ f3_w2
f2_w2 ~~ f3_w2
#Week 3
f1_w3 ~~ f2_w3
f1_w3 ~~ f3_w3
f2_w3 ~~ f3_w3
#Week 4
f1_w4 ~~ f2_w4
f1_w4 ~~ f3_w4
f2_w4 ~~ f3_w4
#Week5
f1_w5 ~~ f2_w5
f1_w5 ~~ f3_w5
f2_w5 ~~ f3_w5
#V5 Covariance
v51 ~~ v52
v52 ~~ v53
v53 ~~ v54
v54 ~~ v55
v51 ~~ v53
v51 ~~ v54
v51 ~~ v55
v52 ~~ v54
v52 ~~ v55
v53 ~~ v55
'
#ANALYSIS#
#Fitting the PPLGM
updated_longitudianl_model <- sem(updated_longitudianl_model, data = noneg_data_standardized, control = list(maxit = 10000), estimator = "MLR")
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
summary(updated_longitudianl_model, fit.measures=TRUE, standardized=TRUE, rsquare=TRUE)
## lavaan 0.6.16 ended normally after 87 iterations
##
## Estimator ML
## Optimization method NLMINB
## Number of model parameters 129
##
## Used Total
## Number of observations 2352 2363
##
## Model Test User Model:
## Standard Scaled
## Test Statistic 8320.083 5089.184
## Degrees of freedom 641 641
## P-value (Chi-square) 0.000 0.000
## Scaling correction factor 1.635
## Yuan-Bentler correction (Mplus variant)
##
## Model Test Baseline Model:
##
## Test statistic 66738.515 34860.589
## Degrees of freedom 735 735
## P-value 0.000 0.000
## Scaling correction factor 1.914
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.884 0.870
## Tucker-Lewis Index (TLI) 0.867 0.851
##
## Robust Comparative Fit Index (CFI) 0.889
## Robust Tucker-Lewis Index (TLI) 0.872
##
## Loglikelihood and Information Criteria:
##
## Loglikelihood user model (H0) -86013.554 -86013.554
## Scaling correction factor 3.294
## for the MLR correction
## Loglikelihood unrestricted model (H1) -81853.513 -81853.513
## Scaling correction factor 1.913
## for the MLR correction
##
## Akaike (AIC) 172285.109 172285.109
## Bayesian (BIC) 173028.539 173028.539
## Sample-size adjusted Bayesian (SABIC) 172618.679 172618.679
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.071 0.054
## 90 Percent confidence interval - lower 0.070 0.053
## 90 Percent confidence interval - upper 0.073 0.055
## P-value H_0: RMSEA <= 0.050 0.000 0.000
## P-value H_0: RMSEA >= 0.080 0.000 0.000
##
## Robust RMSEA 0.069
## 90 Percent confidence interval - lower 0.068
## 90 Percent confidence interval - upper 0.071
## P-value H_0: Robust RMSEA <= 0.050 0.000
## P-value H_0: Robust RMSEA >= 0.080 0.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.098 0.098
##
## Parameter Estimates:
##
## Standard errors Sandwich
## Information bread Observed
## Observed information based on Hessian
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w1 =~
## v11 1.000 1.000 1.000
## f2_w1 =~
## v21 1.000 0.737 0.736
## v31 0.857 0.044 19.387 0.000 0.631 0.631
## f3_w1 =~
## v41 1.000 0.781 0.781
## v51 0.549 0.031 17.586 0.000 0.428 0.430
## f4_w1 =~
## v61 1.000 1.001 1.000
## f5_w1 =~
## v71 1.000 0.999 1.000
## f1_w2 =~
## v12 1.000 0.826 1.000
## f2_w2 =~
## v22 1.000 0.732 0.743
## v32 0.887 0.036 24.873 0.000 0.649 0.657
## f3_w2 =~
## v42 1.000 0.827 0.836
## v52 0.510 0.028 18.099 0.000 0.422 0.430
## f4_w2 =~
## v62 1.000 0.991 1.000
## f5_w2 =~
## v72 1.000 0.999 1.000
## f1_w3 =~
## v13 1.000 0.848 1.000
## f2_w3 =~
## v23 1.000 0.726 0.745
## v33 0.939 0.039 24.306 0.000 0.682 0.698
## f3_w3 =~
## v43 1.000 0.789 0.812
## v53 0.577 0.027 21.062 0.000 0.455 0.466
## f4_w3 =~
## v63 1.000 0.970 1.000
## f5_w3 =~
## v73 1.000 1.000 1.000
## f1_w4 =~
## v14 1.000 0.862 1.000
## f2_w4 =~
## v24 1.000 0.729 0.755
## v34 0.936 0.033 28.783 0.000 0.682 0.703
## f3_w4 =~
## v44 1.000 0.815 0.841
## v54 0.546 0.026 20.897 0.000 0.445 0.457
## f4_w4 =~
## v64 1.000 0.968 1.000
## f5_w4 =~
## v74 1.000 0.999 1.000
## f1_w5 =~
## v15 1.000 0.846 1.000
## f2_w5 =~
## v25 1.000 0.643 0.671
## v35 0.963 0.036 26.555 0.000 0.619 0.644
## f3_w5 =~
## v45 1.000 0.860 0.892
## v55 0.526 0.030 17.338 0.000 0.453 0.463
## f4_w5 =~
## v65 1.000 0.967 1.000
## f5_w5 =~
## v75 1.000 0.996 1.000
##
## Regressions:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w2 ~
## f1_w1 0.449 0.018 24.602 0.000 0.544 0.544
## f1_w3 ~
## f1_w2 0.641 0.021 30.496 0.000 0.625 0.625
## f1_w4 ~
## f1_w3 0.671 0.019 34.598 0.000 0.660 0.660
## f1_w5 ~
## f1_w4 0.699 0.018 39.569 0.000 0.713 0.713
## f2_w2 ~
## f2_w1 0.818 0.034 23.710 0.000 0.823 0.823
## f2_w3 ~
## f2_w2 0.940 0.028 33.634 0.000 0.948 0.948
## f2_w4 ~
## f2_w3 0.953 0.026 36.023 0.000 0.949 0.949
## f2_w5 ~
## f2_w4 0.916 0.025 36.123 0.000 1.039 1.039
## f3_w2 ~
## f3_w1 0.753 0.033 23.004 0.000 0.711 0.711
## f3_w3 ~
## f3_w2 0.785 0.029 27.447 0.000 0.823 0.823
## f3_w4 ~
## f3_w3 0.875 0.027 32.722 0.000 0.846 0.846
## f3_w5 ~
## f3_w4 0.824 0.028 28.997 0.000 0.780 0.780
## f4_w1 ~
## f1_w1 0.003 0.031 0.105 0.916 0.003 0.003
## f2_w1 0.260 0.052 4.987 0.000 0.191 0.191
## f3_w1 0.600 0.063 9.540 0.000 0.468 0.468
## f4_w2 ~
## f1_w2 0.052 0.036 1.426 0.154 0.043 0.043
## f2_w2 0.155 0.049 3.165 0.002 0.114 0.114
## f3_w2 0.596 0.052 11.384 0.000 0.497 0.497
## f4_w1 (alph) 0.111 0.025 4.413 0.000 0.112 0.112
## f4_w3 ~
## f1_w3 0.128 0.032 4.013 0.000 0.112 0.112
## f2_w3 0.241 0.044 5.439 0.000 0.180 0.180
## f3_w3 0.443 0.055 7.996 0.000 0.360 0.360
## f4_w2 (beta) 0.202 0.029 7.039 0.000 0.206 0.206
## f4_w4 ~
## f1_w4 0.099 0.032 3.091 0.002 0.088 0.088
## f2_w4 0.124 0.042 2.964 0.003 0.094 0.094
## f3_w4 0.496 0.054 9.233 0.000 0.417 0.417
## f4_w3 (gamm) 0.269 0.029 9.292 0.000 0.269 0.269
## f4_w5 ~
## f1_w5 0.084 0.029 2.936 0.003 0.073 0.073
## f2_w5 0.087 0.047 1.864 0.062 0.058 0.058
## f3_w5 0.509 0.050 10.128 0.000 0.453 0.453
## f4_w4 (delt) 0.290 0.031 9.408 0.000 0.291 0.291
## f5_w1 ~
## f4_w1 0.144 0.032 4.446 0.000 0.144 0.144
## en_MEAN 0.070 0.021 3.325 0.001 0.071 0.065
## intl_s_ -0.009 0.002 -5.065 0.000 -0.009 -0.115
## intl_s_ -0.102 0.016 -6.325 0.000 -0.102 -0.129
## intl_s_ -0.011 0.040 -0.271 0.786 -0.011 -0.006
## f3_w1 -0.406 0.048 -8.473 0.000 -0.317 -0.317
## f5_w2 ~
## f4_w2 0.007 0.012 0.578 0.563 0.007 0.007
## f5_w1 (epsl) 0.931 0.036 26.025 0.000 0.932 0.932
## f3_w2 0.004 0.019 0.192 0.848 0.003 0.003
## f5_w3 ~
## f4_w3 0.002 0.012 0.136 0.892 0.002 0.002
## f5_w2 (zeta) 0.958 0.018 54.471 0.000 0.957 0.957
## f3_w3 0.008 0.015 0.564 0.573 0.007 0.007
## f5_w4 ~
## f4_w4 0.021 0.011 1.877 0.061 0.021 0.021
## f5_w3 (eta) 0.927 0.036 25.892 0.000 0.928 0.928
## f3_w4 -0.036 0.020 -1.850 0.064 -0.030 -0.030
## f5_w5 ~
## f4_w5 -0.016 0.012 -1.351 0.177 -0.015 -0.015
## f5_w4 (thet) 0.938 0.026 36.574 0.000 0.941 0.941
## f3_w5 -0.023 0.016 -1.428 0.153 -0.020 -0.020
##
## Covariances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## f1_w1 ~~
## f2_w1 0.284 0.022 12.754 0.000 0.385 0.385
## f3_w1 0.415 0.024 17.183 0.000 0.531 0.531
## f2_w1 ~~
## f3_w1 0.383 0.024 15.766 0.000 0.666 0.666
## .f1_w2 ~~
## .f2_w2 0.104 0.012 8.716 0.000 0.362 0.362
## .f3_w2 0.189 0.016 11.763 0.000 0.469 0.469
## .f2_w2 ~~
## .f3_w2 0.163 0.016 10.048 0.000 0.673 0.673
## .f1_w3 ~~
## .f2_w3 0.070 0.010 7.134 0.000 0.458 0.458
## .f3_w3 0.168 0.015 11.203 0.000 0.569 0.569
## .f2_w3 ~~
## .f3_w3 0.097 0.011 8.984 0.000 0.939 0.939
## .f1_w4 ~~
## .f2_w4 0.074 0.010 7.455 0.000 0.498 0.498
## .f3_w4 0.151 0.015 10.371 0.000 0.537 0.537
## .f2_w4 ~~
## .f3_w4 0.073 0.011 6.389 0.000 0.728 0.728
## .f1_w5 ~~
## .f2_w5 0.080 0.012 6.652 0.000 0.745 0.745
## .f3_w5 0.146 0.016 9.022 0.000 0.458 0.458
## .f2_w5 ~~
## .f3_w5 0.095 0.016 5.801 0.000 0.971 0.971
## .v51 ~~
## .v52 0.430 0.024 17.886 0.000 0.430 0.540
## .v52 ~~
## .v53 0.441 0.025 17.719 0.000 0.441 0.576
## .v53 ~~
## .v54 0.450 0.025 18.304 0.000 0.450 0.602
## .v54 ~~
## .v55 0.489 0.026 18.520 0.000 0.489 0.651
## .v51 ~~
## .v53 0.433 0.023 18.788 0.000 0.433 0.558
## .v54 0.420 0.023 18.602 0.000 0.420 0.540
## .v55 0.395 0.023 17.320 0.000 0.395 0.507
## .v52 ~~
## .v54 0.444 0.024 18.445 0.000 0.444 0.579
## .v55 0.469 0.024 19.466 0.000 0.469 0.611
## .v53 ~~
## .v55 0.453 0.025 18.283 0.000 0.453 0.605
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .v11 0.000 0.000 0.000
## .v21 0.459 0.029 16.004 0.000 0.459 0.458
## .v31 0.601 0.029 20.653 0.000 0.601 0.601
## .v41 0.391 0.026 15.010 0.000 0.391 0.391
## .v51 0.807 0.028 29.362 0.000 0.807 0.815
## .v61 0.000 0.000 0.000
## .v71 0.000 0.000 0.000
## .v12 0.000 0.000 0.000
## .v22 0.436 0.025 17.491 0.000 0.436 0.448
## .v32 0.556 0.025 22.008 0.000 0.556 0.569
## .v42 0.294 0.020 15.021 0.000 0.294 0.301
## .v52 0.784 0.030 26.559 0.000 0.784 0.815
## .v62 0.000 0.000 0.000
## .v72 0.000 0.000 0.000
## .v13 0.000 0.000 0.000
## .v23 0.422 0.023 18.144 0.000 0.422 0.444
## .v33 0.488 0.026 19.017 0.000 0.488 0.512
## .v43 0.322 0.021 15.500 0.000 0.322 0.341
## .v53 0.746 0.029 25.883 0.000 0.746 0.783
## .v63 0.000 0.000 0.000
## .v73 0.000 0.000 0.000
## .v14 0.000 0.000 0.000
## .v24 0.400 0.022 18.057 0.000 0.400 0.429
## .v34 0.476 0.024 20.049 0.000 0.476 0.506
## .v44 0.276 0.020 13.970 0.000 0.276 0.293
## .v54 0.750 0.029 26.123 0.000 0.750 0.791
## .v64 0.000 0.000 0.000
## .v74 0.000 0.000 0.000
## .v15 0.000 0.000 0.000
## .v25 0.504 0.026 19.738 0.000 0.504 0.549
## .v35 0.540 0.025 21.337 0.000 0.540 0.585
## .v45 0.191 0.027 7.123 0.000 0.191 0.205
## .v55 0.752 0.031 24.105 0.000 0.752 0.786
## .v65 0.000 0.000 0.000
## .v75 0.000 0.000 0.000
## f1_w1 1.000 0.029 34.965 0.000 1.000 1.000
## f2_w1 0.543 0.034 16.050 0.000 1.000 1.000
## f3_w1 0.610 0.034 17.953 0.000 1.000 1.000
## .f4_w1 0.623 0.037 16.639 0.000 0.623 0.623
## .f5_w1 0.892 0.058 15.374 0.000 0.893 0.893
## .f1_w2 0.481 0.020 23.908 0.000 0.704 0.704
## .f2_w2 0.173 0.020 8.508 0.000 0.323 0.323
## .f3_w2 0.338 0.029 11.761 0.000 0.494 0.494
## .f4_w2 0.556 0.034 16.519 0.000 0.567 0.567
## .f5_w2 0.134 0.029 4.654 0.000 0.134 0.134
## .f1_w3 0.438 0.020 21.511 0.000 0.609 0.609
## .f2_w3 0.054 0.013 4.214 0.000 0.102 0.102
## .f3_w3 0.200 0.022 8.920 0.000 0.322 0.322
## .f4_w3 0.487 0.026 18.618 0.000 0.517 0.517
## .f5_w3 0.085 0.011 7.898 0.000 0.086 0.086
## .f1_w4 0.419 0.020 21.453 0.000 0.564 0.564
## .f2_w4 0.053 0.013 4.152 0.000 0.100 0.100
## .f3_w4 0.188 0.023 8.300 0.000 0.284 0.284
## .f4_w4 0.450 0.027 16.616 0.000 0.480 0.480
## .f5_w4 0.136 0.022 6.063 0.000 0.136 0.136
## .f1_w5 0.352 0.017 20.180 0.000 0.492 0.492
## .f2_w5 -0.033 0.020 -1.626 0.104 -0.079 -0.079
## .f3_w5 0.290 0.031 9.326 0.000 0.391 0.391
## .f4_w5 0.429 0.028 15.472 0.000 0.459 0.459
## .f5_w5 0.108 0.019 5.657 0.000 0.109 0.109
##
## R-Square:
## Estimate
## v11 1.000
## v21 0.542
## v31 0.399
## v41 0.609
## v51 0.185
## v61 1.000
## v71 1.000
## v12 1.000
## v22 0.552
## v32 0.431
## v42 0.699
## v52 0.185
## v62 1.000
## v72 1.000
## v13 1.000
## v23 0.556
## v33 0.488
## v43 0.659
## v53 0.217
## v63 1.000
## v73 1.000
## v14 1.000
## v24 0.571
## v34 0.494
## v44 0.707
## v54 0.209
## v64 1.000
## v74 1.000
## v15 1.000
## v25 0.451
## v35 0.415
## v45 0.795
## v55 0.214
## v65 1.000
## v75 1.000
## f4_w1 0.377
## f5_w1 0.107
## f1_w2 0.296
## f2_w2 0.677
## f3_w2 0.506
## f4_w2 0.433
## f5_w2 0.866
## f1_w3 0.391
## f2_w3 0.898
## f3_w3 0.678
## f4_w3 0.483
## f5_w3 0.914
## f1_w4 0.436
## f2_w4 0.900
## f3_w4 0.716
## f4_w4 0.520
## f5_w4 0.864
## f1_w5 0.508
## f2_w5 NA
## f3_w5 0.609
## f4_w5 0.541
## f5_w5 0.891
#Modification Indices
mod_ind_pplgm <- modificationindices(updated_longitudianl_model)
## Warning in sqrt(var.lhs.value * var.rhs.value): NaNs produced
## Warning in lav_start_check_cov(lavpartable = lavpartable, start = START): lavaan WARNING: starting values imply NaN for a correlation value;
## variables involved are: f1_w5 f2_w5
# spotting the top 10
head_mod_pplgm <- head(mod_ind_pplgm[order(mod_ind_pplgm$mi, decreasing=TRUE), ], 10)
head_mod_pplgm
## lhs op rhs mi epc sepc.lv sepc.all sepc.nox
## 138 v15 ~~ v15 639.569 -0.759 0.000 0.000 0.000
## 29 f1_w5 =~ v15 528.516 -0.331 -0.280 -0.331 -0.331
## 2354 f5_w1 ~ f5_w5 500.068 1.453 1.447 1.447 1.447
## 1239 v12 ~~ v13 457.764 -0.171 -0.171 NA NA
## 1383 v72 ~~ v73 356.701 -0.028 -0.028 NA NA
## 2398 f5_w3 ~ f5_w4 339.639 -0.315 -0.315 -0.315 -0.315
## 1845 f5_w3 ~~ f5_w4 338.729 -0.043 -0.397 -0.397 -0.397
## 130 v73 ~~ v73 338.578 0.046 0.000 0.000 0.000
## 838 f5_w4 =~ v73 338.138 -0.314 -0.314 -0.314 -0.314
## 2423 f5_w4 ~ f5_w2 337.400 0.479 0.479 0.479 0.479
#Making a graph?
png("updated_longitudinal_SEM_diagram.png", width = 800, height = 600)
semPlot::semPaths(updated_longitudianl_model, whatLabels = "std", layout = "tree", rotation = 2)
dev.off()
## png
## 2