From 233326cb8c3a5d2e3e1591bf9add02b116682cda Mon Sep 17 00:00:00 2001 From: Dominique Date: Tue, 22 Jul 2025 13:11:58 -0400 Subject: [PATCH 1/2] parameterize NLPModel type --- Project.toml | 2 +- src/structure.jl | 108 +++++++++++++++++------------------------------ 2 files changed, 39 insertions(+), 71 deletions(-) diff --git a/Project.toml b/Project.toml index 0812b88..a7be450 100644 --- a/Project.toml +++ b/Project.toml @@ -8,4 +8,4 @@ NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" [compat] NLPModels = "0.18, 0.19, 0.20, 0.21" -julia = "1.6" +julia = "1.10" diff --git a/src/structure.jl b/src/structure.jl index 96264a2..9d27286 100644 --- a/src/structure.jl +++ b/src/structure.jl @@ -2,12 +2,11 @@ export NLPModel """ nlp = NLPModel(x, f; kwargs...) - nlp = NLPModel(x, lvar, uvar, f; kwargs...) -Creates a nonlinear optimization model with objective function `f`, -starting point `x`, and variables bounds `lvar` and `uvar` (if provided). -You can provide additional functions by keyword arguments. -Here is the list of accepted function names and their signatures: +Creates a nonlinear optimization model with objective function `f`, and +starting point `x`. +You can provide bounds and additional functions by keyword arguments. +Here is the list of accepted keyword arguments and their default values: Unconstrained: - `grad = (gx, x) -> gx`: gradient of `f` at `x`. Stores in `gx`. @@ -16,6 +15,8 @@ Unconstrained: - `hess_coord = (rows, cols, (vals, x; obj_weight=1) -> ...)`: sparse Hessian at `x` in triplet format. Constrained: +- `lvar = -Inf * ones(nvar)`: vector of lower bounds on `x`. +- `uvar = Inf * ones(nvar)`: vector of upper bounds on `x`. - `cons = ((cx, x) -> ..., lcon, ucon)`: constraints at `x`. Stores in `cx`. `lcon` and `ucon` are the constraint bounds. - `jprod = (jv, x, v) -> ...`: Jacobian at `x` times vector `v`. Stores in `jv`. - `jtprod = (jtv, x, v) -> ...`: transposed Jacobian at `x` times vector `v`. Stores in `jtv`. 
@@ -23,22 +24,22 @@ Constrained: - `hprod = (hv, x, y, v; obj_weight=1) -> ...`: Lagrangian Hessian at `(x, y)` times vector `v`. Stores in `hv`. - `hess_coord = (rows, cols, (vals, x, y; obj_weight=1) -> ...)`: sparse Lagrangian Hessian at `(x,y)` in triplet format. """ -struct NLPModel{T, V} <: AbstractNLPModel{T, V} +struct NLPModel{T, V, F, G, FG, Hv, Vi, H, C, Jv, Jtu, J} <: AbstractNLPModel{T, V} meta::NLPModelMeta{T, V} counters::Counters - obj # obj(x) - grad # grad(gx, x) - objgrad # objgrad(gx, x) -> (f, gx) - hprod # hprod(hv, x, v; obj_weight::Real=1) or hprod(hv, x, y, v; obj_weight::Real=1) - Hrows - Hcols - Hvals # Hvals(vals, x; obj_weight::Real=1) or Hvals(vals, x, y; obj_weight::Real=1) - cons # cons(cx, x) - jprod # jprod(jv, x, v) - jtprod # jtprod(jtv, x, v) - Jrows - Jcols - Jvals # Jvals(vals, x) + obj::F # obj(x) + grad::G # grad(gx, x) + objgrad::FG # objgrad(gx, x) -> (f, gx) + hprod::Hv # hprod(hv, x, v; obj_weight::Real=1) or hprod(hv, x, y, v; obj_weight::Real=1) + Hrows::Vi + Hcols::Vi + Hvals::H # Hvals(vals, x; obj_weight::Real=1) or Hvals(vals, x, y; obj_weight::Real=1) + cons::C # cons(cx, x) + jprod::Jv # jprod(jv, x, v) + jtprod::Jtu # jtprod(jtv, x, v) + Jrows::Vi + Jcols::Vi + Jvals::J # Jvals(vals, x) end function notimplemented(args...; kwargs...) @@ -48,6 +49,8 @@ end function NLPModel( x::V, obj; + lvar::V = fill!(V(undef, length(x)), -Inf), + uvar::V = fill!(V(undef, length(x)), Inf), grad = notimplemented, objgrad = notimplemented, hprod = notimplemented, @@ -65,6 +68,8 @@ function NLPModel( meta = NLPModelMeta{T, V}( length(x), x0 = x, + lvar = lvar, + uvar = uvar, nnzj = nnzj, nnzh = nnzh, ncon = length(lcon), @@ -72,11 +77,22 @@ function NLPModel( ucon = ucon; meta_args..., ) - return NLPModel{T, V}( + grad = grad == notimplemented ? 
(gx, x) -> objgrad(gx, x)[2] : grad + F = typeof(obj) + G = typeof(grad) + FG = typeof(objgrad) + Hv = typeof(hprod) + Vi = typeof(Hrows) + H = typeof(Hvals) + C = typeof(c) + Jv = typeof(jprod) + Jtu = typeof(jtprod) + J = typeof(Jvals) + return NLPModel{T, V, F, G, FG, Hv, Vi, H, C, Jv, Jtu, J}( meta, Counters(), obj, - grad === notimplemented ? (gx, x) -> objgrad(gx, x)[2] : grad, + grad, objgrad, hprod, Hrows, @@ -91,52 +107,4 @@ function NLPModel( ) end -function NLPModel( - x::V, - ℓ::V, - u::V, - obj; - grad = notimplemented, - objgrad = notimplemented, - hprod = notimplemented, - hess_coord = (Int[], Int[], notimplemented), - cons = (notimplemented, V(undef, 0), V(undef, 0)), - jprod = notimplemented, - jtprod = notimplemented, - jac_coord = (Int[], Int[], notimplemented), - meta_args = (), -) where {T, V <: AbstractVector{T}} - Hrows, Hcols, Hvals = hess_coord - Jrows, Jcols, Jvals = jac_coord - c, lcon, ucon = cons - nnzh, nnzj = length(Hrows), length(Jrows) - meta = NLPModelMeta{T, V}( - length(x), - x0 = x, - lvar = ℓ, - uvar = u, - nnzj = nnzj, - nnzh = nnzh, - ncon = length(lcon), - lcon = lcon, - ucon = ucon; - meta_args..., - ) - return NLPModel{T, V}( - meta, - Counters(), - obj, - grad === notimplemented ? (gx, x) -> objgrad(gx, x)[2] : grad, - objgrad, - hprod, - Hrows, - Hcols, - Hvals, - c, - jprod, - jtprod, - Jrows, - Jcols, - Jvals, - ) -end +@deprecate NLPModel(x, ℓ, u, args...; kwargs...) NLPModel(x, args...; lvar = ℓ, uvar = u, kwargs...) 
From 1be1cc98bf8770698f2df2e12c4d77a5762c51f2 Mon Sep 17 00:00:00 2001 From: Dominique Date: Wed, 23 Jul 2025 15:28:27 -0400 Subject: [PATCH 2/2] test allocations --- test/Project.toml | 5 +++++ test/allocs.jl | 15 +++++++++++++++ test/problems/hs5.jl | 15 +++++++++++---- test/problems/hs6.jl | 2 +- test/runtests.jl | 1 + 5 files changed, 33 insertions(+), 5 deletions(-) create mode 100644 test/allocs.jl diff --git a/test/Project.toml b/test/Project.toml index d550ecf..e971e26 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -4,3 +4,8 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" NLPModelsTest = "7998695d-6960-4d3a-85c4-e1bceb8cd856" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[compat] +CUDA = "5.8" +NLPModels = "0.21" +NLPModelsTest = "0.10" diff --git a/test/allocs.jl b/test/allocs.jl new file mode 100644 index 0000000..6927f11 --- /dev/null +++ b/test/allocs.jl @@ -0,0 +1,15 @@ +@testset "Allocations" begin + @testset "Allocations for hs5" begin + @testset "hs5" begin + include("problems/hs5.jl") + test_zero_allocations(hs5()) + end + end + + @testset "Allocations for hs6" begin + @testset "hs6" begin + include("problems/hs6.jl") + test_zero_allocations(hs6()) + end + end +end diff --git a/test/problems/hs5.jl b/test/problems/hs5.jl index 1b7d2d8..46e6dce 100644 --- a/test/problems/hs5.jl +++ b/test/problems/hs5.jl @@ -19,16 +19,23 @@ hs5() = hs5(Float64) hs5(::Type{T}) where {T <: Number} = hs5(Vector{T}) function hs5(::Type{V}) where {V} T = eltype(V) - hprod(hv, x, v; obj_weight = one(T)) = - (hv .= (-sin(x[1] + x[2]) * (v[1] + v[2]) .+ 2 * V([v[1] - v[2]; v[2] - v[1]])) * obj_weight) + hprod(hv, x, v; obj_weight = one(T)) = begin + hv[1] = v[1] - v[2] + hv[2] = v[2] - v[1] + hv .*= 2 * obj_weight + hv .+= -sin(x[1] + x[2]) * (v[1] + v[2]) + end hess_coord(vals, x; obj_weight = one(T)) = begin vals[1] = vals[3] = -sin(x[1] + x[2]) + 2 vals[2] = -sin(x[1] + x[2]) - 2 vals .*= 
obj_weight end f(x) = sin(x[1] + x[2]) + (x[1] - x[2])^2 - 3x[1] / 2 + 5x[2] / 2 + 1 - grad(gx, x) = - (gx .= cos(x[1] + x[2]) .+ 2 * (x[1] - x[2]) * V([1; -1]) + V([-15 // 10; 25 // 10])) + grad(gx, x) = begin + gx[1] = -15 // 10 + 2 * (x[1] - x[2]) + gx[2] = 25 // 10 - 2 * (x[1] - x[2]) + gx .+= cos(x[1] + x[2]) + end objgrad(gx, x) = f(x), grad(gx, x) return NLPModel( fill!(V(undef, 2), 0), diff --git a/test/problems/hs6.jl b/test/problems/hs6.jl index 72223ae..5534d11 100644 --- a/test/problems/hs6.jl +++ b/test/problems/hs6.jl @@ -25,7 +25,7 @@ function hs6(::Type{V}) where {V} return NLPModel( V([-12 // 10; 1]), x -> (1 - x[1])^2; - grad = (gx, x) -> gx .= V([2 * (x[1] - 1); 0]), + grad = (gx, x) -> (gx[1] = 2 * (x[1] - 1); gx[2] = 0; gx), # objgrad explicitly not implemented hprod = hprod, hess_coord = ([1], [1], hess_coord), diff --git a/test/runtests.jl b/test/runtests.jl index 24f7280..27246a5 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -4,3 +4,4 @@ using CUDA, Test include("nlpmodelstest.jl") include("notimplemented.jl") +include("allocs.jl")