diff --git a/Project.toml b/Project.toml
index b1a2b70..8ece160 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,20 +1,25 @@
 name = "NLSolversBase"
 uuid = "d41bc354-129a-5804-8e4c-c37616107c6c"
-version = "7.8.3"
+version = "7.9.0"
 
 [deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 DiffResults = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
+DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
 FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 
 [compat]
+ADTypes = "1.11.0"
 DiffResults = "1.0"
-ForwardDiff = "0.10"
+DifferentiationInterface = "0.6.24"
 FiniteDiff = "2.0"
-julia = "1.5"
+ForwardDiff = "0.10"
+julia = "1.10"
 
 [extras]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 OptimTestProblems = "cec144fc-5a64-5bc6-99fb-dde8f63e154c"
@@ -24,4 +29,4 @@ SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["ComponentArrays", "LinearAlgebra", "OptimTestProblems", "Random", "RecursiveArrayTools", "SparseArrays", "Test"]
+test = ["ADTypes", "ComponentArrays", "LinearAlgebra", "OptimTestProblems", "Random", "RecursiveArrayTools", "SparseArrays", "Test"]
diff --git a/README.md b/README.md
index e7d74f8..bea7580 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,18 @@ There are currently three main types: `NonDifferentiable`, `OnceDifferentiable`,
 The words in front of `Differentiable` in the type names (`Non`, `Once`, `Twice`) are not meant to indicate a specific classification of the function as such (a `OnceDifferentiable` might be constructed for an infinitely differentiable function), but signals to an algorithm if the correct functions have been constructed or if automatic differentiation should be used to further differentiate the function.
 
+## Automatic differentiation
+
+Some constructors for `OnceDifferentiable`, `TwiceDifferentiable`, `OnceDifferentiableConstraints` and `TwiceDifferentiableConstraints` accept an argument called `autodiff` (positional or keyword, depending on the constructor).
+This argument can be either:
+
+- An object of a type that subtypes `AbstractADType`, defined by [ADTypes.jl](https://github.com/SciML/ADTypes.jl) and supported by [DifferentiationInterface.jl](https://github.com/JuliaDiff/DifferentiationInterface.jl).
+- A `Symbol`: `:finite` (or one of its variants `:central`, `:finiteforward`, `:finitecomplex`) falls back on `ADTypes.AutoFiniteDiff`, while `:forward` (or `:forwarddiff`) falls back on `ADTypes.AutoForwardDiff`.
+- A `Bool`, namely `true`, which falls back on `ADTypes.AutoForwardDiff`.
+
+When the argument `chunk` is passed, it is used to configure the chunk size of `ADTypes.AutoForwardDiff`, but _only_ if `autodiff` is `:forward`, `:forwarddiff` or `true`.
+If `autodiff isa ADTypes.AutoForwardDiff`, we assume that the user has already selected the appropriate chunk size, so `chunk` is ignored.
+
 ## Examples
 #### Optimization
 Say we want to minimize the Hosaki test function
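To illustrate the new README section, here is a minimal sketch using the keyword form of the constructors (the form exercised in `test/autodiff.jl` further down); the `rosenbrock` function is an ad-hoc example, not part of the package:

```julia
using NLSolversBase, ADTypes

rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
x0 = zeros(2)

# The Symbol and Bool spellings fall back on the corresponding ADTypes object,
# so all three objectives below use a ForwardDiff-backed gradient.
od1 = OnceDifferentiable(rosenbrock, x0; autodiff = :forward)
od2 = OnceDifferentiable(rosenbrock, x0; autodiff = true)
od3 = OnceDifferentiable(rosenbrock, x0; autodiff = AutoForwardDiff())

gradient!(od1, [0.5, 0.5])  # evaluate and cache the gradient at a point
```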
diff --git a/src/NLSolversBase.jl b/src/NLSolversBase.jl
index 47d07b0..b745340 100644
--- a/src/NLSolversBase.jl
+++ b/src/NLSolversBase.jl
@@ -2,6 +2,8 @@ __precompile__(true)
 
 module NLSolversBase
 
+using ADTypes: AbstractADType, AutoForwardDiff, AutoFiniteDiff
+import DifferentiationInterface as DI
 using FiniteDiff, ForwardDiff, DiffResults
 import Distributed: clear!
 export AbstractObjective,
@@ -54,9 +56,24 @@ function finitediff_fdtype(autodiff)
     fdtype
 end
 
+forwarddiff_chunksize(::Nothing) = nothing
+forwarddiff_chunksize(::ForwardDiff.Chunk{C}) where {C} = C
+
 is_finitediff(autodiff) = autodiff ∈ (:central, :finite, :finiteforward, :finitecomplex)
 is_forwarddiff(autodiff) = autodiff ∈ (:forward, :forwarddiff, true)
 
+get_adtype(autodiff::AbstractADType, chunk=nothing) = autodiff
+
+function get_adtype(autodiff::Union{Symbol,Bool}, chunk=nothing)
+    if is_finitediff(autodiff)
+        return AutoFiniteDiff(; fdtype=finitediff_fdtype(autodiff)())
+    elseif is_forwarddiff(autodiff)
+        return AutoForwardDiff(; chunksize=forwarddiff_chunksize(chunk))
+    else
+        error("The autodiff value $autodiff is not supported. Use :finite or :forward.")
+    end
+end
+
 x_of_nans(x, Tf=eltype(x)) = fill!(Tf.(x), Tf(NaN))
 
 include("objective_types/inplace_factory.jl")
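To make the symbol-to-backend translation concrete, a REPL-style sketch of `get_adtype` (an internal, unexported helper; the `:finite` line assumes `finitediff_fdtype` maps `:finite` to central differences, a definition this hunk does not show):

```julia
using ADTypes, ForwardDiff
using NLSolversBase: get_adtype

get_adtype(:forward)                      # AutoForwardDiff(chunksize=nothing)
get_adtype(true, ForwardDiff.Chunk{4}())  # AutoForwardDiff(chunksize=4)
get_adtype(:finite)                       # AutoFiniteDiff(fdtype=Val(:central)), assuming the usual mapping
get_adtype(AutoForwardDiff(chunksize=2), ForwardDiff.Chunk{8}())  # returned as-is; chunk is ignored
```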
diff --git a/src/objective_types/constraints.jl b/src/objective_types/constraints.jl
index 7e772be..5d1d103 100644
--- a/src/objective_types/constraints.jl
+++ b/src/objective_types/constraints.jl
@@ -139,27 +139,13 @@ function OnceDifferentiableConstraints(c!, lx::AbstractVector, ux::AbstractVecto
     xcache = zeros(T, sizex)
     ccache = zeros(T, sizec)
 
-    if is_finitediff(autodiff)
-        ccache2 = similar(ccache)
-        fdtype = finitediff_fdtype(autodiff)
-        jacobian_cache = FiniteDiff.JacobianCache(xcache, ccache,ccache2,fdtype)
-        function jfinite!(J, x)
-            FiniteDiff.finite_difference_jacobian!(J, c!, x, jacobian_cache)
-            J
-        end
-        return OnceDifferentiableConstraints(c!, jfinite!, bounds)
-    elseif is_forwarddiff(autodiff)
-        jac_cfg = ForwardDiff.JacobianConfig(c!, ccache, xcache, chunk)
-        ForwardDiff.checktag(jac_cfg, c!, xcache)
-
-        function jforward!(J, x)
-            ForwardDiff.jacobian!(J, c!, ccache, x, jac_cfg, Val{false}())
-            J
-        end
-        return OnceDifferentiableConstraints(c!, jforward!, bounds)
-    else
-        error("The autodiff value $autodiff is not support. Use :finite or :forward.")
+    backend = get_adtype(autodiff, chunk)
+    jac_prep = DI.prepare_jacobian(c!, ccache, backend, xcache)
+    function j!(_j, _x)
+        DI.jacobian!(c!, ccache, _j, jac_prep, backend, _x)
+        return _j
     end
+    return OnceDifferentiableConstraints(c!, j!, bounds)
 end
@@ -179,153 +165,55 @@ function TwiceDifferentiableConstraints(c!, lx::AbstractVector, ux::AbstractVect
                                         lc::AbstractVector, uc::AbstractVector,
                                         autodiff::Symbol = :central,
                                         chunk::ForwardDiff.Chunk = checked_chunk(lx))
-    if is_finitediff(autodiff)
-        fdtype = finitediff_fdtype(autodiff)
-        return twicediff_constraints_finite(c!,lx,ux,lc,uc,fdtype,nothing)
-    elseif is_forwarddiff(autodiff)
-        return twicediff_constraints_forward(c!,lx,ux,lc,uc,chunk,nothing)
-    else
-        error("The autodiff value $autodiff is not support. Use :finite or :forward.")
-    end
-end
-
-function TwiceDifferentiableConstraints(c!, con_jac!,lx::AbstractVector, ux::AbstractVector,
-                                        lc::AbstractVector, uc::AbstractVector,
-                                        autodiff::Symbol = :central,
-                                        chunk::ForwardDiff.Chunk = checked_chunk(lx))
-    if is_finitediff(autodiff)
-        fdtype = finitediff_fdtype(autodiff)
-        return twicediff_constraints_finite(c!,lx,ux,lc,uc,fdtype,con_jac!)
-    elseif is_forwarddiff(autodiff)
-        return twicediff_constraints_forward(c!,lx,ux,lc,uc,chunk,con_jac!)
-    else
-        error("The autodiff value $autodiff is not support. Use :finite or :forward.")
-    end
-end
-
-
-
-function TwiceDifferentiableConstraints(lx::AbstractArray, ux::AbstractArray)
-    bounds = ConstraintBounds(lx, ux, [], [])
-    TwiceDifferentiableConstraints(bounds)
-end
-
-
-function twicediff_constraints_forward(c!, lx, ux, lc, uc,chunk,con_jac! = nothing)
     bounds = ConstraintBounds(lx, ux, lc, uc)
     T = eltype(bounds)
     nc = length(lc)
     nx = length(lx)
+    x_example = zeros(T, nx)
+    λ_example = zeros(T, nc)
     ccache = zeros(T, nc)
-    xcache = zeros(T, nx)
-    cache_check = Ref{DataType}(Missing) #the datatype Missing, not the singleton
-    ref_f= Ref{Any}() #cache for intermediate jacobian used in the hessian
-    cxxcache = zeros(T, nx * nc, nx) #output cache for hessian
-    h = reshape(cxxcache, (nc, nx, nx)) #reshaped output
-    hi = [@view h[i,:,:] for i in 1:nc]
-    #ref_f caches the closure function with its caches. other aproaches include using a Dict, but the
-    #cost of switching happens just once per optimize call.
-
-    if isnothing(con_jac!) #if the jacobian is not provided, generate one
-        jac_cfg = ForwardDiff.JacobianConfig(c!, ccache, xcache, chunk)
-        ForwardDiff.checktag(jac_cfg, c!, xcache)
-
-        jac! = (J, x) -> begin
-            ForwardDiff.jacobian!(J, c!, ccache, x, jac_cfg, Val{false}())
-            J
-        end
+
+    function sum_constraints(_x, _λ)
+        # TODO: get rid of this allocation with DI.Cache
+        ccache_righttype = zeros(promote_type(T, eltype(_x)), nc)
+        c!(ccache_righttype, _x)
+        return sum(_λ[i] * ccache_righttype[i] for i in eachindex(_λ, ccache_righttype))
+    end
 
-        con_jac_cached = x -> begin
-            exists_cache = (cache_check[] == eltype(x))
-            if exists_cache
-                f = ref_f[]
-                return f(x)
-            else
-                jcache = zeros(eltype(x), nc)
-                out_cache = zeros(eltype(x), nc, nx)
-                cfg_cache = ForwardDiff.JacobianConfig(c!,jcache,x)
-                f = z->ForwardDiff.jacobian!(out_cache, c!, jcache, z,cfg_cache,Val{false}())
-                ref_f[] = f
-                cache_check[]= eltype(x)
-                return f(x)
-            end
-        end
+    backend = get_adtype(autodiff, chunk)
 
-    else
-        jac! = (J,x) -> con_jac!(J,x)
-
-        #here, the cache should also include a JacobianConfig
-        con_jac_cached = x -> begin
-            exists_cache = (cache_check[] == eltype(x))
-            if exists_cache
-                f = ref_f[]
-                return f(x)
-            else
-                out_cache = zeros(eltype(x), nc, nx)
-                f = z->jac!(out_cache,x)
-                ref_f[] = f
-                cache_check[]= eltype(x)
-                return f(x)
-            end
-        end
+
+    jac_prep = DI.prepare_jacobian(c!, ccache, backend, x_example)
+    function con_jac!(_j, _x)
+        DI.jacobian!(c!, ccache, _j, jac_prep, backend, _x)
+        return _j
     end
-
-    hess_config_cache = ForwardDiff.JacobianConfig(typeof(con_jac_cached),lx)
-    function con_hess!(hess, x, λ)
-        ForwardDiff.jacobian!(cxxcache, con_jac_cached, x,hess_config_cache,Val{false}())
-        for i = 1:nc #hot hessian loop
-            hess+=λ[i].*hi[i]
-        end
-        return hess
+
+    hess_prep = DI.prepare_hessian(sum_constraints, backend, x_example, DI.Constant(λ_example))
+    function con_hess!(_h, _x, _λ)
+        DI.hessian!(sum_constraints, _h, hess_prep, backend, _x, DI.Constant(_λ))
+        return _h
     end
-    return TwiceDifferentiableConstraints(c!, jac!, con_hess!, bounds)
+    return TwiceDifferentiableConstraints(c!, con_jac!, con_hess!, bounds)
 end
-
-function twicediff_constraints_finite(c!,lx,ux,lc,uc,fdtype,con_jac! = nothing)
-    bounds = ConstraintBounds(lx, ux, lc, uc)
-    T = eltype(bounds)
-    nx = length(lx)
-    nc = length(lc)
-    xcache = zeros(T, nx)
-    ccache = zeros(T, nc)
+function TwiceDifferentiableConstraints(c!, con_jac!, lx::AbstractVector, ux::AbstractVector,
+                                        lc::AbstractVector, uc::AbstractVector,
+                                        autodiff::Symbol = :central,
+                                        chunk::ForwardDiff.Chunk = checked_chunk(lx))
+    # TODO: is con_jac! still useful? we ignore it here
 
-    if isnothing(con_jac!)
-        jac_ccache = similar(ccache)
-        jacobian_cache = FiniteDiff.JacobianCache(xcache, ccache,jac_ccache,fdtype)
-        function jac!(J, x)
-            FiniteDiff.finite_difference_jacobian!(J, c!, x, jacobian_cache)
-            J
-        end
-    else
-        jac! = (J,x) -> con_jac!(J,x)
-    end
-    cxxcache = zeros(T,nc*nx,nx) # to create cached jacobian
-    h = reshape(cxxcache, (nc, nx, nx)) #reshaped output
-    hi = [@view h[i,:,:] for i in 1:nc]
-
-    function jac_vec!(J,x) #to evaluate the jacobian of a jacobian, FiniteDiff needs a vector version of that
-        j_mat = reshape(J,nc,nx)
-        return jac!(j_mat,x)
-        return J
-    end
-    hess_xcache =similar(xcache)
-    hess_cxcache =zeros(T,nc*nx) #output of jacobian, as a vector
-    hess_cxxcache =similar(hess_cxcache)
-    hess_config_cache = FiniteDiff.JacobianCache(hess_xcache,hess_cxcache,hess_cxxcache,fdtype)
-    function con_hess!(hess, x, λ)
-        FiniteDiff.finite_difference_jacobian!(cxxcache, jac_vec!, x,hess_config_cache)
-        for i = 1:nc
-            hi = @view h[i,:,:]
-            hess+=λ[i].*hi
-        end
-        return hess
-    end
-    return TwiceDifferentiableConstraints(c!, jac!, con_hess!, bounds)
+    return TwiceDifferentiableConstraints(c!, lx, ux, lc, uc, autodiff, chunk)
 end
+
+function TwiceDifferentiableConstraints(lx::AbstractArray, ux::AbstractArray)
+    bounds = ConstraintBounds(lx, ux, [], [])
+    TwiceDifferentiableConstraints(bounds)
+end
+
 function TwiceDifferentiableConstraints(bounds::ConstraintBounds)
     c! = (x, c)->nothing
     J! = (x, J)->nothing
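The new `con_hess!` leans on DifferentiationInterface's `Constant` context to hold the multipliers fixed while differentiating with respect to `x`. A self-contained sketch of that pattern, where `c_sum` is an illustrative function rather than anything from the package:

```julia
using DifferentiationInterface
using ADTypes: AutoForwardDiff
import ForwardDiff  # the backend package must be loaded

# Weighted sum of two constraints; λ enters as a non-differentiated context.
c_sum(x, λ) = λ[1] * (x[1]^2 + x[2]^2) + λ[2] * x[1] * x[2]

x = [1.0, 2.0]
λ = [0.5, 1.5]
backend = AutoForwardDiff()

# Preparation happens once; the resulting prep object is reused on every call.
prep = prepare_hessian(c_sum, backend, x, Constant(λ))
H = zeros(2, 2)
hessian!(c_sum, H, prep, backend, x, Constant(λ))  # Hessian of Σᵢ λᵢ cᵢ(x) w.r.t. x
```

The multiplier values at execution time may differ from those used during preparation (only size and element type must match), which is why preparing with the zero vector `λ_example` above is sound.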
diff --git a/src/objective_types/oncedifferentiable.jl b/src/objective_types/oncedifferentiable.jl
index 421fa33..69c3345 100644
--- a/src/objective_types/oncedifferentiable.jl
+++ b/src/objective_types/oncedifferentiable.jl
@@ -43,37 +43,16 @@ function OnceDifferentiable(f, x_seed::AbstractArray{T},
 
         return OnceDifferentiable(fF, dfF, fdfF, x_seed, F, DF)
     else
-        if is_finitediff(autodiff)
-
-            # Figure out which Val-type to use for FiniteDiff based on our
-            # symbol interface.
-            fdtype = finitediff_fdtype(autodiff)
-            df_array_spec = DF
-            x_array_spec = x_seed
-            return_spec = typeof(F)
-            gcache = FiniteDiff.GradientCache(df_array_spec, x_array_spec, fdtype, return_spec)
-
-            function g!(storage, x)
-                FiniteDiff.finite_difference_gradient!(storage, f, x, gcache)
-                return
-            end
-            function fg!(storage, x)
-                g!(storage, x)
-                return f(x)
-            end
-        elseif is_forwarddiff(autodiff)
-            gcfg = ForwardDiff.GradientConfig(f, x_seed, chunk)
-            g! = (out, x) -> ForwardDiff.gradient!(out, f, x, gcfg)
-
-            fg! = (out, x) -> begin
-                gr_res = DiffResults.DiffResult(zero(T), out)
-                ForwardDiff.gradient!(gr_res, f, x, gcfg)
-                DiffResults.value(gr_res)
-            end
-        else
-            error("The autodiff value $autodiff is not supported. Use :finite or :forward.")
+        backend = get_adtype(autodiff, chunk)
+        grad_prep = DI.prepare_gradient(f, backend, x_seed)
+        function g!(_g, _x)
+            DI.gradient!(f, _g, grad_prep, backend, _x)
+            return nothing
+        end
+        function fg!(_g, _x)
+            y, _ = DI.value_and_gradient!(f, _g, grad_prep, backend, _x)
+            return y
         end
         return OnceDifferentiable(f, g!, fg!, x_seed, F, DF)
     end
 end
@@ -99,72 +78,18 @@ function OnceDifferentiable(f, x_seed::AbstractArray, F::AbstractArray, DF::Abst
         fdfF = make_fdf(f, x_seed, F)
         return OnceDifferentiable(fF, dfF, fdfF, x_seed, F, DF)
     else
-        if is_finitediff(autodiff)
-            # Figure out which Val-type to use for FiniteDiff based on our
-            # symbol interface.
-            fdtype = finitediff_fdtype(autodiff)
-            # Apparently only the third input is aliased.
-            j_finitediff_cache = FiniteDiff.JacobianCache(copy(x_seed), copy(F), copy(F), fdtype)
-            if autodiff == :finiteforward
-                # These copies can be done away with if we add a keyword for
-                # reusing arrays instead for overwriting them.
-                Fx = copy(F)
-                DF = copy(DF)
-
-                x_f, x_df = x_of_nans(x_seed), x_of_nans(x_seed)
-                f_calls, j_calls = [0,], [0,]
-                function j_finiteforward!(J, x)
-                    # Exploit the possibility that it might be that x_f == x
-                    # then we don't have to call f again.
-
-                    # if at least one element of x_f is different from x, update
-                    if any(x_f .!= x)
-                        f(Fx, x)
-                        f_calls .+= 1
-                    end
-
-                    FiniteDiff.finite_difference_jacobian!(J, f, x, j_finitediff_cache, Fx)
-                end
-                function fj_finiteforward!(F, J, x)
-                    f(F, x)
-                    FiniteDiff.finite_difference_jacobian!(J, f, x, j_finitediff_cache, F)
-                end
-
-
-                return OnceDifferentiable(f, j_finiteforward!, fj_finiteforward!, Fx, DF, x_f, x_df, f_calls, j_calls)
-            end
-
-            function fj_finitediff!(F, J, x)
-                f(F, x)
-                FiniteDiff.finite_difference_jacobian!(J, f, x, j_finitediff_cache)
-                F
-            end
-            function j_finitediff!(J, x)
-                F_cache = copy(F)
-                fj_finitediff!(F_cache, J, x)
-            end
-
-            return OnceDifferentiable(f, j_finitediff!, fj_finitediff!, x_seed, F, DF)
-
-        elseif is_forwarddiff(autodiff)
-
-            jac_cfg = ForwardDiff.JacobianConfig(f, F, x_seed, chunk)
-            ForwardDiff.checktag(jac_cfg, f, x_seed)
-
-            F2 = copy(F)
-            function j_forwarddiff!(J, x)
-                ForwardDiff.jacobian!(J, f, F2, x, jac_cfg, Val{false}())
-            end
-            function fj_forwarddiff!(F, J, x)
-                jac_res = DiffResults.DiffResult(F, J)
-                ForwardDiff.jacobian!(jac_res, f, F2, x, jac_cfg, Val{false}())
-                DiffResults.value(jac_res)
-            end
-
-            return OnceDifferentiable(f, j_forwarddiff!, fj_forwarddiff!, x_seed, F, DF)
-        else
-            error("The autodiff value $(autodiff) is not supported. Use :finite or :forward.")
+        F2 = similar(F)
+        backend = get_adtype(autodiff, chunk)
+        jac_prep = DI.prepare_jacobian(f, F2, backend, x_seed)
+        function j!(_j, _x)
+            DI.jacobian!(f, F2, _j, jac_prep, backend, _x)
+            return _j
+        end
+        function fj!(_y, _j, _x)
+            y, _ = DI.value_and_jacobian!(f, _y, _j, jac_prep, backend, _x)
+            return y
         end
+        return OnceDifferentiable(f, j!, fj!, x_seed, F, DF)
     end
 end
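The Jacobian path now follows DifferentiationInterface's prepare-once, evaluate-many pattern for in-place functions `f!(F, x)`. A standalone sketch under the same assumptions, with a made-up residual `f!`:

```julia
using DifferentiationInterface
using ADTypes: AutoForwardDiff
import ForwardDiff  # the backend package must be loaded

function f!(F, x)  # in-place residual, analogous to the `f` handled above
    F[1] = x[1]^2 + x[2]^2 - 2
    F[2] = x[1] - x[2]
    return F
end

x = [1.0, 1.0]
F = zeros(2)
J = zeros(2, 2)
backend = AutoForwardDiff()

prep = prepare_jacobian(f!, F, backend, x)       # preparation, once up front
value_and_jacobian!(f!, F, J, prep, backend, x)  # fills F and J, returns (F, J)
```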
diff --git a/src/objective_types/twicedifferentiable.jl b/src/objective_types/twicedifferentiable.jl
index c008426..b8ce01b 100644
--- a/src/objective_types/twicedifferentiable.jl
+++ b/src/objective_types/twicedifferentiable.jl
@@ -54,23 +54,11 @@ function TwiceDifferentiable(f, g,
 
     g! = df!_from_df(g, F, inplace)
     fg! = make_fdf(x_seed, F, f, g!)
 
-    if is_finitediff(autodiff)
-
-        # Figure out which Val-type to use for FiniteDiff based on our
-        # symbol interface.
-        fdtype = finitediff_fdtype(autodiff)
-
-        jcache = FiniteDiff.JacobianCache(x_seed, fdtype)
-        function h!(storage, x)
-            FiniteDiff.finite_difference_jacobian!(storage, g!, x, jcache)
-            return
-        end
-
-    elseif is_forwarddiff(autodiff)
-        hcfg = ForwardDiff.HessianConfig(f, copy(x_seed))
-        h! = (out, x) -> ForwardDiff.hessian!(out, f, x, hcfg)
-    else
-        error("The autodiff value $(autodiff) is not supported. Use :finite or :forward.")
+    backend = get_adtype(autodiff)
+    hess_prep = DI.prepare_hessian(f, backend, x_seed)
+    function h!(_h, _x)
+        DI.hessian!(f, _h, hess_prep, backend, _x)
+        return _h
     end
     TwiceDifferentiable(f, g!, fg!, h!, x_seed, F)
 end
@@ -80,63 +68,31 @@ TwiceDifferentiable(d::NonDifferentiable, x_seed::AbstractVector{T} = d.x_f, F::
 function TwiceDifferentiable(d::OnceDifferentiable, x_seed::AbstractVector{T} = d.x_f,
                              F::Real = real(zero(T)); autodiff = :finite) where T<:Real
-    if is_finitediff(autodiff)
-
-        # Figure out which Val-type to use for FiniteDiff based on our
-        # symbol interface.
-        fdtype = finitediff_fdtype(autodiff)
-
-        jcache = FiniteDiff.JacobianCache(x_seed, fdtype)
-        function h!(storage, x)
-            FiniteDiff.finite_difference_jacobian!(storage, d.df, x, jcache)
-            return
-        end
-    elseif is_forwarddiff(autodiff)
-        hcfg = ForwardDiff.HessianConfig(d.f, copy(gradient(d)))
-        h! = (out, x) -> ForwardDiff.hessian!(out, d.f, x, hcfg)
-    else
-        error("The autodiff value $(autodiff) is not supported. Use :finite or :forward.")
+    backend = get_adtype(autodiff)
+    hess_prep = DI.prepare_hessian(d.f, backend, x_seed)
+    function h!(_h, _x)
+        DI.hessian!(d.f, _h, hess_prep, backend, _x)
+        return _h
     end
     return TwiceDifferentiable(d.f, d.df, d.fdf, h!, x_seed, F, gradient(d))
 end
 
 function TwiceDifferentiable(f, x::AbstractArray, F::Real = real(zero(eltype(x)));
                              autodiff = :finite, inplace = true)
-    if is_finitediff(autodiff)
-
-        # Figure out which Val-type to use for FiniteDiff based on our
-        # symbol interface.
-        fdtype = finitediff_fdtype(autodiff)
-        gcache = FiniteDiff.GradientCache(x, x, fdtype)
-
-        function g!(storage, x)
-            FiniteDiff.finite_difference_gradient!(storage, f, x, gcache)
-            return
-        end
-        function fg!(storage, x)
-            g!(storage, x)
-            return f(x)
-        end
-
-        function h!(storage, x)
-            FiniteDiff.finite_difference_hessian!(storage, f, x)
-            return
-        end
-    elseif is_forwarddiff(autodiff)
-
-        gcfg = ForwardDiff.GradientConfig(f, x)
-        g! = (out, x) -> ForwardDiff.gradient!(out, f, x, gcfg)
-
-        fg! = (out, x) -> begin
-            gr_res = DiffResults.DiffResult(zero(eltype(x)), out)
-            ForwardDiff.gradient!(gr_res, f, x, gcfg)
-            DiffResults.value(gr_res)
-        end
-
-        hcfg = ForwardDiff.HessianConfig(f, x)
-        h! = (out, x) -> ForwardDiff.hessian!(out, f, x, hcfg)
-    else
-        error("The autodiff value $(autodiff) is not supported. Use :finite or :forward.")
+    backend = get_adtype(autodiff)
+    grad_prep = DI.prepare_gradient(f, backend, x)
+    hess_prep = DI.prepare_hessian(f, backend, x)
+    function g!(_g, _x)
+        DI.gradient!(f, _g, grad_prep, backend, _x)
+        return nothing
+    end
+    function fg!(_g, _x)
+        y, _ = DI.value_and_gradient!(f, _g, grad_prep, backend, _x)
+        return y
+    end
+    function h!(_h, _x)
+        DI.hessian!(f, _h, hess_prep, backend, _x)
+        return _h
    end
     TwiceDifferentiable(f, g!, fg!, h!, x, F)
 end
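End to end, the new Hessian path can be exercised as below, a sketch mirroring the tests (`f` is the same ad-hoc Rosenbrock-style example as earlier):

```julia
using NLSolversBase, ADTypes

f(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
td = TwiceDifferentiable(f, zeros(2); autodiff = AutoForwardDiff())

value!(td, [0.5, 0.5])
gradient!(td, [0.5, 0.5])
hessian!(td, [0.5, 0.5])  # computed via DI.prepare_hessian / DI.hessian!
```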
diff --git a/test/autodiff.jl b/test/autodiff.jl
index 992e5da..b42f141 100644
--- a/test/autodiff.jl
+++ b/test/autodiff.jl
@@ -52,8 +52,8 @@
     gx = g(NLSolversBase.alloc_DF(x, 0.0), x)
     h(H, x) = copyto!(H, Diagonal(6 .* x))
     hx = h(fill(0.0, nx, nx), x)
-    for dtype in (OnceDifferentiable, TwiceDifferentiable)
-        for autodiff in (:finite, :forward)
+    @testset for dtype in (OnceDifferentiable, TwiceDifferentiable)
+        @testset for autodiff in (:finite, :forward, AutoForwardDiff())
             # :forward should be exact, but :finite will not be
             differentiable = dtype(f, copy(x); autodiff = autodiff)
             value!(differentiable, copy(x))
@@ -78,7 +78,7 @@
         end
     end
 
-    for autodiff in (:finite, :forward)
+    @testset for autodiff in (:finite, :forward, AutoForwardDiff())
        td = TwiceDifferentiable(x->sum(x), (G, x)->copyto!(G, fill!(copy(x),1)), copy(x); autodiff = autodiff)
        value(td)
        value!(td, x)
@@ -86,7 +86,7 @@
        gradient!(td, x)
        hessian!(td, x)
    end
-    for autodiff in (:finite, :forward)
+    @testset for autodiff in (:finite, :forward, AutoForwardDiff())
        for nd = (NonDifferentiable(x->sum(x), copy(x)), NonDifferentiable(x->sum(x), copy(x), 0.0))
            td = TwiceDifferentiable(nd; autodiff = autodiff)
            value(td)
diff --git a/test/runtests.jl b/test/runtests.jl
index 5cfc673..07edf77 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -5,6 +5,7 @@ using ComponentArrays
 using SparseArrays
 using OptimTestProblems
 using RecursiveArrayTools
+using ADTypes
 MVP = OptimTestProblems.MultivariateProblems
 
 # TODO: Use OptimTestProblems (but it does not have exponential_gradient_hession etc.)
@@ -77,11 +78,13 @@ function exponential_hessian_product!(storage, x)
     storage[2, 2] = 2.0 * exp((3.0 - x[1])^2) * (2.0 * x[2]^2 - 12.0 * x[2] + 19)
 end
 
-include("objective_types.jl")
-include("interface.jl")
-include("incomplete.jl")
-include("constraints.jl")
-include("abstractarrays.jl")
-include("autodiff.jl")
-include("sparse.jl")
-include("kwargs.jl")
+@testset verbose=true "NLSolversBase.jl" begin
+    include("objective_types.jl")
+    include("interface.jl")
+    include("incomplete.jl")
+    include("constraints.jl")
+    include("abstractarrays.jl")
+    include("autodiff.jl")
+    include("sparse.jl")
+    include("kwargs.jl")
+end