Skip to content

JuMP or MOI? #6

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
odow opened this issue Mar 11, 2025 · 3 comments · May be fixed by #19
Open

JuMP or MOI? #6

odow opened this issue Mar 11, 2025 · 3 comments · May be fixed by #19
Assignees

Comments

@odow
Copy link
Member

odow commented Mar 11, 2025

I'm starting to wonder if this should be at the MOI level with a thin JuMP shim to get back the JuMP references?

IIS, dual feasibility, starting point, coefficient analysis. These can all be MOI level analysis tools.

@joaquimg
Copy link
Member

I like the idea of it being on the MOI level, with a thin JuMP thing.

The one we need to be a bit more careful with is dual feasibility, since primal feasibility already lives in JuMP. We can re-write primal feasibility in JuMP.

@odow
Copy link
Member Author

odow commented Mar 11, 2025

I've been messing around with this:

# Copyright (c) 2025: Joaquim Garcia, Oscar Dowson and contributors
#
# Use of this source code is governed by an MIT-style license that can be found
# in the LICENSE.md file or at https://opensource.org/licenses/MIT.

"""
    StartPointAnalyzer <: AbstractModelAnalyzer

Analyzer that evaluates a model's objective, objective gradient, constraints,
and constraint Jacobian at the variables' primal starting point and reports any
values that are not finite.
"""
struct StartPointAnalyzer <: AbstractModelAnalyzer end

"""
    StartPointAnalyzerResult <: AbstractModelAnalyzerResult

Raw evaluations of a model at the primal starting point, produced by
`analyze(::StartPointAnalyzer, model)` and scanned by `list_of_issues`.
"""
struct StartPointAnalyzerResult <: AbstractModelAnalyzerResult
    x::Vector{MOI.VariableIndex}       # all variables in the model
    constraints::Vector{Any}           # `nlp_ci => ci` pairs mapping nonlinear rows to original constraint indices
    f::Float64                         # objective value at the start point
    ∇f::Vector{Float64}                # objective gradient, aligned with `x`
    g::Vector{Float64}                 # constraint values, aligned with `constraints`
    J_struc::Vector{Tuple{Int,Int}}    # (row, col) sparsity pattern of the Jacobian
    J::Vector{Float64}                 # Jacobian nonzeros, aligned with `J_struc`
end

"""
    ObjectiveNotFinite <: AbstractModelIssue

The objective evaluated at the start point is not finite (`Inf`, `-Inf`, or `NaN`).
"""
struct ObjectiveNotFinite <: AbstractModelIssue
    value::Float64  # the non-finite objective value
end

"""
    summarize(io::IO, err::ObjectiveNotFinite)

Print a human-readable description of `err` to `io`.
"""
summarize(io::IO, err::ObjectiveNotFinite) =
    println(io, "The objective is not finite.\n\tf = $(err.value)")

"""
    ObjectiveGradientNotFinite <: AbstractModelIssue

The objective gradient with respect to variable `x`, evaluated at the start
point, is not finite.
"""
struct ObjectiveGradientNotFinite <: AbstractModelIssue
    x::MOI.VariableIndex  # variable whose gradient entry is non-finite
    value::Float64        # the non-finite gradient value
end

"""
    summarize(io::IO, err::ObjectiveGradientNotFinite)

Print a human-readable description of `err` to `io`.
"""
function summarize(io::IO, err::ObjectiveGradientNotFinite)
    msg = "The objective gradient w.r.t. $(err.x) is not finite.\n\t∇f($(err.x)) = $(err.value)"
    return println(io, MOI.Utilities.replace_acronym(msg))
end

"""
    ConstraintNotFinite{F,S} <: AbstractModelIssue

The value of constraint `ci`, evaluated at the start point, is not finite.
"""
struct ConstraintNotFinite{F,S} <: AbstractModelIssue
    ci::MOI.ConstraintIndex{F,S}  # the offending constraint
    value::Float64                # the non-finite constraint value
end

"""
    summarize(io::IO, err::ConstraintNotFinite)

Print a human-readable description of `err` to `io`.
"""
function summarize(io::IO, err::ConstraintNotFinite)
    msg = "The value of constraint $(err.ci) is not finite.\n\tGot $(err.value)"
    return println(io, MOI.Utilities.replace_acronym(msg))
end

"""
    ConstraintJacobianNotFinite{F,S} <: AbstractModelIssue

The Jacobian entry of constraint `ci` with respect to variable `x`, evaluated
at the start point, is not finite.
"""
struct ConstraintJacobianNotFinite{F,S} <: AbstractModelIssue
    ci::MOI.ConstraintIndex{F,S}  # the offending constraint
    x::MOI.VariableIndex          # the variable of the non-finite entry
    value::Float64                # the non-finite Jacobian value
end

"""
    summarize(io::IO, err::ConstraintJacobianNotFinite)

Print a human-readable description of `err` to `io`.
"""
function summarize(io::IO, err::ConstraintJacobianNotFinite)
    msg = "The gradient of constraint $(err.ci) w.r.t. $(err.x) is not finite.\n\tGot $(err.value)"
    return println(io, MOI.Utilities.replace_acronym(msg))
end

"""
    summarize(io::IO, result::StartPointAnalyzerResult)

Print every issue found in `result` to `io`, one summary per issue.
"""
function summarize(io::IO, result::StartPointAnalyzerResult)
    foreach(issue -> summarize(io, issue), list_of_issues(result))
    return
end

"""
    list_of_issues(result::StartPointAnalyzerResult)

Scan `result` for non-finite values and return the corresponding vector of
`AbstractModelIssue`s (empty if every evaluation was finite).
"""
function list_of_issues(result::StartPointAnalyzerResult)
    issues = AbstractModelIssue[]
    # Objective value.
    isfinite(result.f) || push!(issues, ObjectiveNotFinite(result.f))
    # Objective gradient, one entry per variable.
    for (variable, grad) in zip(result.x, result.∇f)
        isfinite(grad) || push!(issues, ObjectiveGradientNotFinite(variable, grad))
    end
    # Constraint values; each `pair` is `nlp_ci => ci`, so `pair[2]` is the
    # original MOI constraint index.
    for (pair, value) in zip(result.constraints, result.g)
        isfinite(value) || push!(issues, ConstraintNotFinite(pair[2], value))
    end
    # Jacobian nonzeros; `coord` gives the (constraint row, variable column).
    for (coord, entry) in zip(result.J_struc, result.J)
        if !isfinite(entry)
            row, col = coord
            ci = result.constraints[row][2]
            push!(issues, ConstraintJacobianNotFinite(ci, result.x[col], entry))
        end
    end
    return issues
end

"""
    analyze(::StartPointAnalyzer, model::MOI.ModelLike)

Evaluate `model`'s objective, gradient, constraints, and Jacobian at the
variables' `VariablePrimalStart` values (variables without a start default to
`0.0`) and return a `StartPointAnalyzerResult` with the raw evaluations.
"""
function analyze(::StartPointAnalyzer, model::MOI.ModelLike)
    x = MOI.get(model, MOI.ListOfVariableIndices())
    # Missing starts are treated as 0.0 via `something`.
    start = something.(MOI.get.(model, MOI.VariablePrimalStart(), x), 0.0)
    # Rebuild the whole model inside an MOI.Nonlinear.Model so a single
    # evaluator can compute values and derivatives for everything.
    nlp = MOI.Nonlinear.Model()
    constraints = Any[]
    for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent())
        for ci in MOI.get(model, MOI.ListOfConstraintIndices{F,S}())
            f = MOI.get(model, MOI.ConstraintFunction(), ci)
            s = MOI.get(model, MOI.ConstraintSet(), ci)
            nlp_ci = MOI.Nonlinear.add_constraint(nlp, f, s)
            # Remember the mapping from nonlinear row back to the original
            # constraint index so issues can reference the user's model.
            push!(constraints, nlp_ci => ci)
        end
    end
    # Copy the objective, if one is set (the attribute is parameterized by the
    # function type, hence the scan over set model attributes).
    for attr in MOI.get(model, MOI.ListOfModelAttributesSet())
        if attr isa MOI.ObjectiveFunction
            obj_f = MOI.get(model, attr)
            MOI.Nonlinear.set_objective(nlp, obj_f)
        end
    end
    backend = MOI.Nonlinear.SparseReverseMode()
    evaluator = MOI.Nonlinear.Evaluator(nlp, backend, x)
    # Only gradients and Jacobians are needed; no Hessian initialization.
    MOI.initialize(evaluator, [:Grad, :Jac])
    J_struc = MOI.jacobian_structure(evaluator)
    J = zeros(length(J_struc))
    ∇f = zeros(length(x))
    g = zeros(length(constraints))
    f = MOI.eval_objective(evaluator, start)
    MOI.eval_objective_gradient(evaluator, ∇f, start)
    MOI.eval_constraint(evaluator, g, start)
    MOI.eval_constraint_jacobian(evaluator, J, start)
    return StartPointAnalyzerResult(
        x,
        constraints,
        f,
        ∇f,
        g,
        J_struc,
        J,
    )
end
    # Demo: build a model whose objective is undefined at the start point.
    model = MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}())
    x = MOI.add_variable(model)
    # Start at x = -1, so x + 1 = 0 and log(x + 1) evaluates to -Inf.
    MOI.set(model, MOI.VariablePrimalStart(), x, -1.0)
    f = MOI.ScalarNonlinearFunction(:log, Any[x + 1.0])
    MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f)
    # Constraint x * log(x + 1) <= 0 is likewise non-finite at the start.
    g = MOI.ScalarNonlinearFunction(:*, Any[x, f])
    MOI.add_constraint(model, g, MOI.LessThan(0.0))
    ret = ModelAnalyzer.analyze(ModelAnalyzer.StartPointAnalyzer(), model)
    ModelAnalyzer.summarize(stdout, ret)
The objective is not finite.
	f = -Inf
The objective gradient w.r.t. MOI.VariableIndex(1) is not finite.
	∇f(MOI.VariableIndex(1)) = Inf
The value of constraint MOI.ConstraintIndex{MOI.ScalarNonlinearFunction, MOI.LessThan{Float64}}(1) is not finite.
	Got Inf
The gradient of constraint MOI.ConstraintIndex{MOI.ScalarNonlinearFunction, MOI.LessThan{Float64}}(1) w.r.t. MOI.VariableIndex(1) is not finite.
	Got -Inf

I think the thing would be to make everything as programmatic as possible, with the prints trivially implemented so that JuMP can re-define the printing with the appropriate references.

@joaquimg
Copy link
Member

@joaquimg joaquimg linked a pull request Apr 30, 2025 that will close this issue
@joaquimg joaquimg self-assigned this May 5, 2025
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Development

Successfully merging a pull request may close this issue.

2 participants