@@ -23,6 +23,31 @@ is_hessian_required(::EvaluationTargets{<:Any, <:Any, H}) where {H} = H
2323
# Make an `EvaluationTargets` object callable on a packed argument tuple:
# the tuple is splatted into the regular varargs call `targets(args...)`.
function (targets::EvaluationTargets)(arg_tuple::Tuple)
    return targets(arg_tuple...)
end
2525
26+ """
27+ evaluate!(objective, gradient, hessian [, lossfun], model, params)
28+
29+ Evaluates the objective, gradient, and/or Hessian at the given parameter vector.
30+ If a loss function is passed, only this specific loss function is evaluated, otherwise,
31+ the sum of all loss functions in the model is evaluated.
32+
33+ If objective, gradient or hessian are `nothing`, they are not evaluated.
34+ For example, since many numerical optimization algorithms don't require a Hessian,
35+ the computation will be turned off by setting `hessian` to `nothing`.
36+
37+ # Arguments
38+ - `objective`: a Number if the objective should be evaluated, otherwise `nothing`
39+ - `gradient`: a pre-allocated vector the gradient should be written to, otherwise `nothing`
40+ - `hessian`: a pre-allocated matrix the Hessian should be written to, otherwise `nothing`
41+ - `lossfun::SemLossFunction`: loss function to evaluate
42+ - `model::AbstractSem`: model to evaluate
43+ - `params`: vector of parameters
44+
45+ # Implementing a new loss function
46+ To implement a new loss function, a new method for `evaluate!` has to be defined.
47+ This is explained in the online documentation on [Custom loss functions](@ref).
48+ """
49+ function evaluate! end
50+
# Dispatch on SemImplied: insert the model's implied type as an extra argument
# so that implied-specific `evaluate!` methods can be selected.
function evaluate!(objective, gradient, hessian, loss::SemLossFunction, model::AbstractSem, params)
    return evaluate!(objective, gradient, hessian, loss, implied(model), model, params)
end
0 commit comments