Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
e89f556
start rewriting interface to loss functions and imply
Maximilian-Stefan-Ernst May 13, 2022
7a62bda
add to interface rewrite
Maximilian-Stefan-Ernst May 13, 2022
e11d315
finish rewrite of basic types and objective gradient hessian methods
Maximilian-Stefan-Ernst May 14, 2022
2388156
start rewriting SemML
Maximilian-Stefan-Ernst May 14, 2022
dbe1135
add to SemML refactoring
Maximilian-Stefan-Ernst May 14, 2022
7b4345d
finish ML rewrite
Maximilian-Stefan-Ernst May 14, 2022
5fb42ad
refactor ImplySymbolic
Maximilian-Stefan-Ernst May 14, 2022
7022172
fix RAMSymbolic
Maximilian-Stefan-Ernst May 14, 2022
a6e17f6
bug fixes SemML and RAMSymbolic
Maximilian-Stefan-Ernst May 15, 2022
e341490
add default methods for objective/gradient/hessian
Maximilian-Stefan-Ernst May 15, 2022
2a3c508
grad/obj/hes methods for ImplyEmpty
Maximilian-Stefan-Ernst May 15, 2022
95e95a1
generic OGH methods for imply
Maximilian-Stefan-Ernst May 15, 2022
abddd1b
typo in RAMSymbolic
Maximilian-Stefan-Ernst May 15, 2022
6eff117
OGH methods for RAM
Maximilian-Stefan-Ernst May 15, 2022
21d13dd
OGH methods for SemConstant
Maximilian-Stefan-Ernst May 15, 2022
bc437ca
OGH methods for ridge
Maximilian-Stefan-Ernst May 15, 2022
45bb35b
refactor wls
Maximilian-Stefan-Ernst May 16, 2022
a036eb8
refactor FIML
Maximilian-Stefan-Ernst May 17, 2022
1bfab82
fix FIML for RAM
Maximilian-Stefan-Ernst May 17, 2022
fc2186f
fix hessian
Maximilian-Stefan-Ernst May 17, 2022
6ca6f23
fix nonsymbolic imply type
Maximilian-Stefan-Ernst May 17, 2022
cdd5afe
fix ML
Maximilian-Stefan-Ernst May 17, 2022
015b0c3
fix WLS
Maximilian-Stefan-Ernst May 17, 2022
1a705b6
fix stuff
Maximilian-Stefan-Ernst May 18, 2022
715e34d
fix typos and make AbstractSemSingle parametric
Maximilian-Stefan-Ernst May 18, 2022
23a89b2
refactor ML
Maximilian-Stefan-Ernst May 18, 2022
887b3ce
code formatting
Maximilian-Stefan-Ernst May 18, 2022
42b5cbf
fix typos
Maximilian-Stefan-Ernst May 18, 2022
fb8828a
sort test files and require explicit passing of meanstructure = true/…
Maximilian-Stefan-Ernst May 18, 2022
131cfd9
fix SemLoss printing
Maximilian-Stefan-Ernst May 18, 2022
4881fc8
fix tests
Maximilian-Stefan-Ernst May 18, 2022
3522264
fix tests to new interface
Maximilian-Stefan-Ernst May 18, 2022
1ba25ab
fix SemWLS meanstructure argument
Maximilian-Stefan-Ernst May 18, 2022
ca14b2b
fix NLopt wrapper
Maximilian-Stefan-Ernst May 18, 2022
c201a86
fix SemFIML constructor
Maximilian-Stefan-Ernst May 19, 2022
df81a86
fix SemML update_observed
Maximilian-Stefan-Ernst May 19, 2022
c7cb3ea
fix FIML model construction in multigroup tests
Maximilian-Stefan-Ernst May 19, 2022
a77c51d
add extended tests environment variable
Maximilian-Stefan-Ernst May 19, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ jobs:
runs-on: ${{ matrix.os }}
env:
JULIA_NUM_THREADS: 8
JULIA_EXTENDED_TESTS: true
strategy:
fail-fast: false
matrix:
Expand Down
11 changes: 6 additions & 5 deletions src/StructuralEquationModels.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ export *, ==, @StenoGraph, AbstractEdge, AbstractNode, DirectedEdge, Edge, EdgeM

# type hierarchy
include("types.jl")
include("objective_gradient_hessian.jl")
# fitted objects
include("frontend/fit/SemFit.jl")
# specification of models
Expand Down Expand Up @@ -76,21 +77,21 @@ include("frontend/fit/standard_errors/bootstrap.jl")
export AbstractSem,
AbstractSemSingle, AbstractSemCollection, Sem, SemFiniteDiff, SemForwardDiff, SemEnsemble,
SemImply,
RAMSymbolic, RAM, ImplyEmpty,
RAMSymbolic, RAM, ImplyEmpty, imply,
start_val,
start_fabin3, start_simple, start_parameter_table,
SemLoss,
SemLossFunction, SemML, SemFIML, em_mvn, SemLasso, SemRidge,
SemConstant, SemWLS,
SemConstant, SemWLS, loss,
SemDiff,
SemDiffEmpty, SemDiffOptim, SemDiffNLopt, NLoptConstraint,
SemDiffEmpty, SemDiffOptim, SemDiffNLopt, NLoptConstraint, diff,
SemObs,
SemObsCommon, SemObsMissing,
SemObsCommon, SemObsMissing, observed,
sem_fit,
SemFit,
minimum, solution,
sem_summary,
objective, objective!, gradient, gradient!, hessian, hessian!, objective_gradient!,
objective!, gradient!, hessian!, objective_gradient!, objective_hessian!, gradient_hessian!, objective_gradient_hessian!,
ParameterTable,
EnsembleParameterTable, update_partable!, update_estimate!, update_start!,
Fixed, fixed, Start, start, Label, label,
Expand Down
8 changes: 4 additions & 4 deletions src/frontend/fit/standard_errors/hessian.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,12 @@ function se_hessian(sem_fit::SemFit; hessian = :finitediff)
c = H_scaling(sem_fit.model)

if hessian == :analytic
H = hessian!(sem_fit.model, sem_fit.solution)
elseif hessian == :analytic_last
H = hessian(sem_fit.model)
par = solution(sem_fit)
H = zeros(eltype(par), length(par), length(par))
hessian!(H, sem_fit.model, sem_fit.solution)
elseif hessian == :finitediff
H = FiniteDiff.finite_difference_hessian(
x -> objective!(sem_fit.model, x)[1],
x -> objective!(sem_fit.model, x),
sem_fit.solution
)
elseif hessian == :optimizer
Expand Down
18 changes: 12 additions & 6 deletions src/frontend/specification/Sem.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ function SemFiniteDiff(;

observed, imply, loss, diff = get_fields!(kwargs, observed, imply, loss, diff)

sem = SemFiniteDiff(observed, imply, loss, diff, has_gradient)
sem = SemFiniteDiff(observed, imply, loss, diff, Val(has_gradient))

return sem
end
Expand All @@ -53,7 +53,7 @@ function SemForwardDiff(;

observed, imply, loss, diff = get_fields!(kwargs, observed, imply, loss, diff)

sem = SemForwardDiff(observed, imply, loss, diff, has_gradient)
sem = SemForwardDiff(observed, imply, loss, diff, Val(has_gradient))

return sem
end
Expand Down Expand Up @@ -121,6 +121,8 @@ function get_SemLoss(loss; kwargs...)
else
if !isa(loss, SemLossFunction)
loss = SemLoss(loss(;kwargs...); kwargs...)
else
loss = SemLoss(loss; kwargs...)
end
end
return loss
Expand Down Expand Up @@ -178,10 +180,14 @@ function Base.show(io::IO, loss::SemLoss)
print(io, "SemLoss \n")
print(io, "- Loss Functions \n")
print(io, lossfuntypes...)
print(io, "- Fields \n")
print(io, " F: $(typeof(loss.F))) \n")
print(io, " G: $(typeof(loss.G))) \n")
print(io, " H: $(typeof(loss.H))) \n")
print(io, "- Weights \n")
for weight in loss.weights
if isnothing(weight.w)
print(io, " one \n")
else
print(io, "$(round.(weight.w, digits = 2)) \n")
end
end
end

function Base.show(io::IO, models::SemEnsemble)
Expand Down
77 changes: 62 additions & 15 deletions src/imply/RAM/generic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
### Types
############################################################################

mutable struct RAM{A1, A2, A3, A4, A5, A6, V, V2, I1, I2, I3, M1, M2, M3, S1, S2, S3, D} <: SemImply
mutable struct RAM{A1, A2, A3, A4, A5, A6, V, V2, I1, I2, I3, M1, M2, M3, M4, S1, S2, S3, B, D} <: SemImply
Σ::A1
A::A2
S::A3
Expand All @@ -12,6 +12,7 @@ mutable struct RAM{A1, A2, A3, A4, A5, A6, V, V2, I1, I2, I3, M1, M2, M3, S1, S2

n_par::V
ram_matrices::V2
has_meanstructure::B

A_indices::I1
S_indices::I2
Expand All @@ -20,6 +21,7 @@ mutable struct RAM{A1, A2, A3, A4, A5, A6, V, V2, I1, I2, I3, M1, M2, M3, S1, S2
F⨉I_A⁻¹::M1
F⨉I_A⁻¹S::M2
I_A::M3
I_A⁻¹::M4

∇A::S1
∇S::S2
Expand All @@ -36,6 +38,7 @@ function RAM(;
specification,
vech = false,
gradient = true,
meanstructure = false,
kwargs...)

if specification isa RAMMatrices
Expand Down Expand Up @@ -84,7 +87,9 @@ function RAM(;
end

# μ
if !isnothing(M_indices)
if meanstructure

has_meanstructure = Val(true)

if gradient
∇M = get_matrix_derivative(M_indices, parameters, n_nod)
Expand All @@ -95,6 +100,7 @@ function RAM(;
μ = zeros(n_var)

else
has_meanstructure = Val(false)
M_indices = nothing
M_pre = nothing
μ = nothing
Expand All @@ -111,6 +117,7 @@ function RAM(;

n_par,
ram_matrices,
has_meanstructure,

A_indices,
S_indices,
Expand All @@ -119,6 +126,7 @@ function RAM(;
F⨉I_A⁻¹,
F⨉I_A⁻¹S,
I_A,
copy(I_A),

∇A,
∇S,
Expand All @@ -129,42 +137,78 @@ function RAM(;
end

############################################################################
### functors
### methods
############################################################################

function (imply::RAM)(parameters, F, G, H, model)
# Entry points without an explicit meanstructure argument: look up the
# `Val`-typed `has_meanstructure` field and forward to the specialized method,
# so the meanstructure branch is resolved by dispatch rather than a runtime check.
function objective!(imply::RAM, par, model)
    return objective!(imply, par, model, imply.has_meanstructure)
end

function gradient!(imply::RAM, par, model)
    return gradient!(imply, par, model, imply.has_meanstructure)
end

# objective and gradient
function objective!(imply::RAM, parameters, model, has_meanstructure::Val{T}) where T

fill_A_S_M(
imply.A,
imply.A,
imply.S,
imply.M,
imply.A_indices,
imply.S_indices,
imply.M_indices,
parameters)

imply.I_A .= I - imply.A

if !G
copyto!(imply.F⨉I_A⁻¹, imply.F)
rdiv!(imply.F⨉I_A⁻¹, factorize(imply.I_A))
else
imply.I_A .= LinearAlgebra.inv!(factorize(imply.I_A))
imply.F⨉I_A⁻¹ .= imply.F*imply.I_A
end

copyto!(imply.F⨉I_A⁻¹, imply.F)
rdiv!(imply.F⨉I_A⁻¹, factorize(imply.I_A))

Σ_RAM!(
imply.Σ,
imply.F⨉I_A⁻¹,
imply.S,
imply.F⨉I_A⁻¹S)

if !isnothing(imply.μ)
if T
μ_RAM!(imply.μ, imply.F⨉I_A⁻¹, imply.M)
end

end

# Gradient pass for the non-symbolic RAM imply type: refreshes the RAM matrices
# from the current parameter vector and recomputes the model-implied moments,
# keeping an explicit (I - A)⁻¹ around for downstream gradient computations.
function gradient!(imply::RAM, par, model, has_meanstructure::Val{T}) where T

    # Write the current parameter values into A, S (and M, if present).
    fill_A_S_M(
        imply.A, imply.S, imply.M,
        imply.A_indices, imply.S_indices, imply.M_indices,
        par)

    # Form I - A, then invert a copy in place so I_A itself stays intact.
    imply.I_A .= I - imply.A
    copyto!(imply.I_A⁻¹, imply.I_A)
    imply.I_A⁻¹ .= LinearAlgebra.inv!(factorize(imply.I_A⁻¹))

    # Cache F(I - A)⁻¹ for the covariance (and mean) computations below.
    imply.F⨉I_A⁻¹ .= imply.F * imply.I_A⁻¹

    # Model-implied covariance Σ (written into the preallocated buffers).
    Σ_RAM!(
        imply.Σ,
        imply.F⨉I_A⁻¹,
        imply.S,
        imply.F⨉I_A⁻¹S)

    # Model-implied means, only when a meanstructure was requested.
    if T
        μ_RAM!(imply.μ, imply.F⨉I_A⁻¹, imply.M)
    end

end

# Combined objective/gradient/hessian entry points for RAM: the gradient pass
# already recomputes Σ (and μ), which is everything the objective needs as well,
# so every combination simply delegates to `gradient!`.
function objective_gradient!(imply::RAM, par, model, has_meanstructure)
    return gradient!(imply, par, model, has_meanstructure)
end

function objective_hessian!(imply::RAM, par, model, has_meanstructure)
    return gradient!(imply, par, model, has_meanstructure)
end

function gradient_hessian!(imply::RAM, par, model, has_meanstructure)
    return gradient!(imply, par, model, has_meanstructure)
end

function objective_gradient_hessian!(imply::RAM, par, model, has_meanstructure)
    return gradient!(imply, par, model, has_meanstructure)
end

############################################################################
### Recommended methods
############################################################################
Expand Down Expand Up @@ -203,6 +247,9 @@ M_indices(imply::RAM) = imply.M_indices
# Accessors for the preallocated working matrices (filled during evaluation).
F⨉I_A⁻¹(imply::RAM) = imply.F⨉I_A⁻¹
F⨉I_A⁻¹S(imply::RAM) = imply.F⨉I_A⁻¹S
I_A(imply::RAM) = imply.I_A
I_A⁻¹(imply::RAM) = imply.I_A⁻¹ # NOTE: only computed by gradient!, not by objective! — may be stale otherwise

# `Val(true)`/`Val(false)`: whether a meanstructure was requested at construction.
has_meanstructure(imply::RAM) = imply.has_meanstructure

############################################################################
### additional functions
Expand Down
48 changes: 34 additions & 14 deletions src/imply/RAM/symbolic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
### Types
############################################################################

struct RAMSymbolic{F1, F2, F3, A1, A2, A3, S1, S2, S3, V, V2, F4, A4, F5, A5, D1} <: SemImplySymbolic
struct RAMSymbolic{F1, F2, F3, A1, A2, A3, S1, S2, S3, V, V2, F4, A4, F5, A5, D1, B} <: SemImplySymbolic
Σ_function::F1
∇Σ_function::F2
∇²Σ_function::F3
Expand All @@ -19,6 +19,7 @@ struct RAMSymbolic{F1, F2, F3, A1, A2, A3, S1, S2, S3, V, V2, F4, A4, F5, A5, D1
∇μ_function::F5
∇μ::A5
identifier::D1
has_meanstructure::B
end

############################################################################
Expand All @@ -31,6 +32,7 @@ function RAMSymbolic(;
vech = false,
gradient = true,
hessian = false,
meanstructure = false,
kwargs...)

if specification isa RAMMatrices
Expand Down Expand Up @@ -103,7 +105,8 @@ function RAMSymbolic(;
end

# μ
if !isnothing(M)
if meanstructure
has_meanstructure = Val(true)
μ_symbolic = get_μ_symbolic_RAM(M, A, F)
μ_function = Symbolics.build_function(μ_symbolic, par, expression=Val{false})[2]
μ = zeros(size(μ_symbolic))
Expand All @@ -116,6 +119,7 @@ function RAMSymbolic(;
∇μ = nothing
end
else
has_meanstructure = Val(false)
μ_function = nothing
μ = nothing
∇μ_function = nothing
Expand All @@ -138,27 +142,41 @@ function RAMSymbolic(;
μ,
∇μ_function,
∇μ,
identifier
identifier,
has_meanstructure
)
end

############################################################################
### functors
### objective, gradient, hessian
############################################################################

function (imply::RAMSymbolic)(par, F, G, H, model)
# Entry points without an explicit meanstructure argument: read the
# `Val`-typed `has_meanstructure` field and forward to the specialized method,
# resolving the meanstructure branch via dispatch.
function objective!(imply::RAMSymbolic, par, model)
    return objective!(imply, par, model, imply.has_meanstructure)
end

function gradient!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model, imply.has_meanstructure)
end

# objective
function objective!(imply::RAMSymbolic, par, model, has_meanstructure::Val{T}) where T
imply.Σ_function(imply.Σ, par)
if G || H
imply.∇Σ_function(imply.∇Σ, par)
end
if !isnothing(imply.μ)
imply.μ_function(imply.μ, par)
if G || H
imply.∇μ_function(imply.∇μ, par)
end
end
T && imply.μ_function(imply.μ, par)
end

# gradient
# Gradient pass for the symbolic imply type.
function gradient!(imply::RAMSymbolic, par, model, has_meanstructure::Val{T}) where T
    # Σ (and μ, if present) must be current before evaluating their derivatives.
    objective!(imply, par, model, has_meanstructure)
    # Evaluate the precompiled symbolic derivative functions into the buffers;
    # ∇μ only exists when a meanstructure was requested.
    imply.∇Σ_function(imply.∇Σ, par)
    T && imply.∇μ_function(imply.∇μ, par)
end

# Hessian and all combined objective/gradient/hessian entry points delegate to
# the gradient pass — see gradient! above for what gets recomputed.
function hessian!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model)
end

function objective_gradient!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model)
end

function objective_hessian!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model)
end

function gradient_hessian!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model)
end

function objective_gradient_hessian!(imply::RAMSymbolic, par, model)
    return gradient!(imply, par, model)
end

############################################################################
### Recommended methods
############################################################################
Expand Down Expand Up @@ -189,6 +207,8 @@ end
# Accessors for the precompiled symbolic derivative functions.
∇Σ_function(imply::RAMSymbolic) = imply.∇Σ_function
∇²Σ_function(imply::RAMSymbolic) = imply.∇²Σ_function

# `Val(true)`/`Val(false)`: whether a meanstructure was requested at construction.
has_meanstructure(imply::RAMSymbolic) = imply.has_meanstructure

############################################################################
### additional functions
############################################################################
Expand Down
6 changes: 4 additions & 2 deletions src/imply/empty.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,12 @@ function ImplyEmpty(;
end

############################################################################
### functors
### methods
############################################################################

function (imply::ImplyEmpty)(par, F, G, H, model) end
# ImplyEmpty provides no model-implied moments, so every evaluation hook is a
# no-op returning `nothing` (empty function bodies return `nothing` in Julia).
function objective!(imply::ImplyEmpty, par, model) end
function gradient!(imply::ImplyEmpty, par, model) end
function hessian!(imply::ImplyEmpty, par, model) end

############################################################################
### Recommended methods
Expand Down
Loading