diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 5758bf66..d717330c 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -13,8 +13,8 @@ concurrency:
   cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}
 jobs:
   test:
-    name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
-    runs-on: ${{ matrix.os }}
+    name: Julia ${{ matrix.version }} (${{ matrix.group }})
+    runs-on: ubuntu-latest
     timeout-minutes: 60
     permissions: # needed to allow julia-actions/cache to proactively delete old caches that it has created
       actions: write
@@ -25,19 +25,23 @@ jobs:
         version:
           - '1.6'
           - '1'
-        os:
-          - ubuntu-latest
-        arch:
-          - x64
+        group:
+          - Core
+          - NLPModels
+        exclude:
+          - version: '1.6'
+            group: NLPModels
     steps:
       - uses: actions/checkout@v4
       - uses: julia-actions/setup-julia@v2
        with:
          version: ${{ matrix.version }}
-          arch: ${{ matrix.arch }}
+          arch: x64
      - uses: julia-actions/cache@v2
      - uses: julia-actions/julia-buildpkg@v1
      - uses: julia-actions/julia-runtest@v1
+        env:
+          JULIA_SCT_TEST_GROUP: ${{ matrix.group }}
      - uses: julia-actions/julia-processcoverage@v1
      - uses: codecov/codecov-action@v4
        with:
@@ -55,7 +59,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: julia-actions/setup-julia@v2
        with:
-          version: '1.10'
+          version: '1'
      - uses: julia-actions/cache@v2
      - name: Configure doc environment
        shell: julia --project=docs --color=yes {0}
diff --git a/test/Project.toml b/test/Project.toml
index 89487571..dd742a30 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -9,6 +9,7 @@ JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
 JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
+Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 ReferenceTests = "324d217c-45ce-50fc-942e-d289b448e8cf"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
diff --git a/test/nlpmodels.jl b/test/nlpmodels.jl
new file mode 100644
index 00000000..5abe105e
--- /dev/null
+++ b/test/nlpmodels.jl
@@ -0,0 +1,207 @@
+using ADTypes: ADTypes
+using LinearAlgebra
+using SparseArrays
+using SparseConnectivityTracer
+import SparseConnectivityTracer as SCT
+using Test
+
+using Pkg
+Pkg.add([
+    "ADNLPModels", "ForwardDiff", "OptimizationProblems", "NLPModels", "NLPModelsJuMP"
+])
+
+using ADNLPModels
+using ForwardDiff: ForwardDiff
+using NLPModels
+using NLPModelsJuMP
+using OptimizationProblems
+
+## ForwardDiff reference
+
+function directional_derivative(f, x::AbstractVector, d::AbstractVector)
+    return ForwardDiff.derivative(t -> f(x + t * d), zero(eltype(x)))
+end
+
+function second_directional_derivative(
+    f, x::AbstractVector, din::AbstractVector, dout::AbstractVector
+)
+    f_din(x) = directional_derivative(f, x, din)
+    return directional_derivative(f_din, x, dout)
+end
+
+function jac_coeff(f, x::AbstractVector, i::Integer, j::Integer)
+    d = zero(x)
+    d[j] = 1
+    return directional_derivative(f, x, d)[i]
+end
+
+function hess_coeff(f, x::AbstractVector, i::Integer, j::Integer)
+    din = zero(x)
+    din[i] = 1
+    dout = zero(x)
+    dout[j] = 1
+    return second_directional_derivative(f, x, din, dout)
+end
+
+## Function wrappers
+
+function mycons(nlp, x)
+    c = similar(x, nlp.meta.ncon)
+    cons!(nlp, x, c)
+    return c
+end
+
+function mylag(nlp, x)
+    o = obj(nlp, x)
+    c = mycons(nlp, x)
+    λ = randn(length(c))
+    return o + dot(λ, c)
+end
+
+## Jacobian sparsity
+
+function jac_coeff(name::Symbol, i::Integer, j::Integer)
+    nlp = OptimizationProblems.ADNLPProblems.eval(name)()
+    f = Base.Fix1(mycons, nlp)
+    x = nlp.meta.x0
+    return jac_coeff(f, x, i, j)
+end
+
+function jac_sparsity_sct(name::Symbol)
+    nlp = OptimizationProblems.ADNLPProblems.eval(name)()
+    f = Base.Fix1(mycons, nlp)
+    x = nlp.meta.x0
+    return ADTypes.jacobian_sparsity(f, x, TracerSparsityDetector())
+end
+
+function jac_sparsity_jump(name::Symbol)
+    jump_model = OptimizationProblems.PureJuMP.eval(name)()
+    nlp = MathOptNLPModel(jump_model)
+    jrows, jcols = jac_structure(nlp)
+    nnzj = length(jrows)
+    jvals = ones(Bool, nnzj)
+    return sparse(jrows, jcols, jvals, nlp.meta.ncon, nlp.meta.nvar)
+end
+
+## Hessian sparsity
+
+function hess_coeff(name::Symbol, i::Integer, j::Integer)
+    nlp = OptimizationProblems.ADNLPProblems.eval(name)()
+    f = Base.Fix1(mylag, nlp)
+    x = nlp.meta.x0
+    return hess_coeff(f, x, i, j)
+end
+
+function hess_sparsity_sct(name::Symbol)
+    nlp = OptimizationProblems.ADNLPProblems.eval(name)()
+    f = Base.Fix1(mylag, nlp)
+    x = nlp.meta.x0
+    return ADTypes.hessian_sparsity(f, x, TracerSparsityDetector())
+end
+
+function hess_sparsity_jump(name::Symbol)
+    jump_model = OptimizationProblems.PureJuMP.eval(name)()
+    nlp = MathOptNLPModel(jump_model)
+    hrows, hcols = hess_structure(nlp)
+    nnzh = length(hrows)
+    hvals = ones(Bool, nnzh)
+    H_L = sparse(hrows, hcols, hvals, nlp.meta.nvar, nlp.meta.nvar)
+    # only the lower triangular part is stored
+    return sparse(Symmetric(H_L, :L))
+end
+
+## Comparison
+
+function compare_patterns(; sct, jump)
+    A_diff = jump - sct
+    nnz_sct = nnz(sct)
+    nnz_jump = nnz(jump)
+
+    diagonal = if A_diff == Diagonal(A_diff)
+        "[diagonal difference only]"
+    else
+        ""
+    end
+    message = if all(>(0), nonzeros(A_diff))
+        "SCT ($nnz_sct nz) ⊂ JuMP ($nnz_jump nz) $diagonal"
+    elseif all(<(0), nonzeros(A_diff))
+        "SCT ($nnz_sct nz) ⊃ JuMP ($nnz_jump nz) $diagonal"
+    else
+        "SCT ($nnz_sct nz) ≠ JuMP ($nnz_jump nz) $diagonal"
+    end
+    return message
+end
+
+## Actual tests
+
+@testset verbose = true "ForwardDiff reference" begin
+    f(x) = sin.(x) .* cos.(reverse(x)) .* exp(x[1]) .* log(x[end])
+    g(x) = sum(f(x))
+    x = rand(6)
+    @testset "Jacobian" begin
+        J = ForwardDiff.jacobian(f, x)
+        for i in axes(J, 1), j in axes(J, 2)
+            @test J[i, j] == jac_coeff(f, x, i, j)
+        end
+    end
+    @testset "Hessian" begin
+        H = ForwardDiff.hessian(g, x)
+        for i in axes(H, 1), j in axes(H, 2)
+            @test H[i, j] == hess_coeff(g, x, i, j)
+        end
+    end
+end;
+
+jac_inconsistencies = []
+
+@testset verbose = true "Jacobian comparison" begin
+    @testset "$name" for name in Symbol.(OptimizationProblems.meta[!, :name])
+        @info "Testing Jacobian sparsity for $name"
+        J_sct = jac_sparsity_sct(name)
+        J_jump = jac_sparsity_jump(name)
+        if J_sct == J_jump
+            @test J_sct == J_jump
+        else
+            @test_broken J_sct == J_jump
+            J_diff = J_jump - J_sct
+            message = compare_patterns(; sct=J_sct, jump=J_jump)
+            # @warn "Inconsistency for Jacobian of $name: $message"
+            push!(jac_inconsistencies, (name, message))
+            @test all(zip(findnz(J_diff)...)) do (i, j, _)
+                iszero(jac_coeff(name, i, j))
+            end
+        end
+    end
+end;
+
+hess_inconsistencies = []
+
+@testset verbose = true "Hessian comparison" begin
+    @testset "$name" for name in Symbol.(OptimizationProblems.meta[!, :name])
+        @info "Testing Hessian sparsity for $name"
+        H_sct = hess_sparsity_sct(name)
+        H_jump = hess_sparsity_jump(name)
+        if H_sct == H_jump
+            @test H_sct == H_jump
+        else
+            @test_broken H_sct == H_jump
+            message = compare_patterns(; sct=H_sct, jump=H_jump)
+            # @warn "Inconsistency for Hessian of $name: $message"
+            push!(hess_inconsistencies, (name, message))
+            H_diff = H_jump - H_sct
+            @test all(zip(findnz(H_diff)...)) do (i, j, _)
+                iszero(hess_coeff(name, i, j))
+            end
+        end
+    end
+end;
+
+if !isempty(jac_inconsistencies) || !isempty(hess_inconsistencies)
+    @warn "Inconsistencies were detected"
+    for (name, message) in jac_inconsistencies
+        @warn "Inconsistency for Jacobian of $name: $message"
+    end
+    for (name, message) in hess_inconsistencies
+        @warn "Inconsistency for Hessian of $name: $message"
+    end
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index e75f4ad9..a20dd297 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -18,73 +18,90 @@ DocMeta.setdocmeta!(
     recursive=true,
 )
 
+GROUP = get(ENV, "JULIA_SCT_TEST_GROUP", "All")
+
 @testset verbose = true "SparseConnectivityTracer.jl" begin
-    @testset verbose = true "Formalities" begin
-        if VERSION >= v"1.10"
-            @testset "Code formatting" begin
-                @test JuliaFormatter.format(
-                    SparseConnectivityTracer; verbose=false, overwrite=false
-                )
-            end
-            @testset "Aqua tests" begin
-                Aqua.test_all(
-                    SparseConnectivityTracer;
-                    ambiguities=false,
-                    deps_compat=(ignore=[:Random, :SparseArrays], check_extras=false),
-                    stale_deps=(ignore=[:Requires],),
-                    persistent_tasks=false,
-                )
+    if GROUP in ("Core", "All")
+        @testset verbose = true "Formalities" begin
+            if VERSION >= v"1.10"
+                @testset "Code formatting" begin
+                    @test JuliaFormatter.format(
+                        SparseConnectivityTracer; verbose=false, overwrite=false
+                    )
+                end
+                @testset "Aqua tests" begin
+                    Aqua.test_all(
+                        SparseConnectivityTracer;
+                        ambiguities=false,
+                        deps_compat=(ignore=[:Random, :SparseArrays], check_extras=false),
+                        stale_deps=(ignore=[:Requires],),
+                        persistent_tasks=false,
+                    )
+                end
+                @testset "JET tests" begin
+                    JET.test_package(SparseConnectivityTracer; target_defined_modules=true)
+                end
             end
-            @testset "JET tests" begin
-                JET.test_package(SparseConnectivityTracer; target_defined_modules=true)
+            @testset "Doctests" begin
+                Documenter.doctest(SparseConnectivityTracer)
             end
         end
-        @testset "Doctests" begin
-            Documenter.doctest(SparseConnectivityTracer)
-        end
     end
 
     @testset verbose = true "Set types" begin
-        @testset "DuplicateVector" begin
-            include("settypes/duplicatevector.jl")
-        end
-        @testset "RecursiveSet" begin
-            include("settypes/recursiveset.jl")
-        end
-        @testset "SortedVector" begin
-            include("settypes/sortedvector.jl")
+        if GROUP in ("Core", "All")
+            @testset "DuplicateVector" begin
+                include("settypes/duplicatevector.jl")
+            end
+            @testset "RecursiveSet" begin
+                include("settypes/recursiveset.jl")
+            end
+            @testset "SortedVector" begin
+                include("settypes/sortedvector.jl")
+            end
        end
    end

    @testset "Classification of operators by diff'ability" begin
-        include("classification.jl")
+        if GROUP in ("Core", "All")
+            include("classification.jl")
+        end
    end

    @testset verbose = true "Simple examples" begin
-        @testset "Tracer Construction" begin
-            include("test_connectivity.jl")
-        end
-        @testset "ConnectivityTracer" begin
-            include("test_connectivity.jl")
-        end
-        @testset "GradientTracer" begin
-            include("test_gradient.jl")
-        end
-        @testset "HessianTracer" begin
-            include("test_hessian.jl")
+        if GROUP in ("Core", "All")
+            @testset "Tracer Construction" begin
+                include("test_connectivity.jl")
+            end
+            @testset "ConnectivityTracer" begin
+                include("test_connectivity.jl")
+            end
+            @testset "GradientTracer" begin
+                include("test_gradient.jl")
+            end
+            @testset "HessianTracer" begin
+                include("test_hessian.jl")
+            end
        end
    end

    @testset verbose = true "Real-world examples" begin
-        @testset "Brusselator" begin
-            include("brusselator.jl")
+        if GROUP in ("Core", "All")
+            @testset "Brusselator" begin
+                include("brusselator.jl")
+            end
+            @testset "Flux.jl" begin
+                include("flux.jl")
+            end
        end
-        @testset "Flux.jl" begin
-            include("flux.jl")
+        if GROUP in ("NLPModels", "All")
+            @testset "NLPModels" begin
+                include("nlpmodels.jl")
+            end
        end
    end

    @testset "ADTypes integration" begin
-        include("adtypes.jl")
+        GROUP in ("Core", "All") && include("adtypes.jl")
    end
end