diff --git a/docs/Project.toml b/docs/Project.toml index 1e7c869d..9cfe5269 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -19,7 +19,7 @@ InfiniteOpt = "0.5" Ipopt = "1.6" HiGHS = "1" julia = "1.6" -JuMP = "1.22" +JuMP = "1.23" Literate = "2.18" Plots = "1" SpecialFunctions = "2" diff --git a/docs/src/guide/measure.md b/docs/src/guide/measure.md index 36b22365..fac3eeca 100644 --- a/docs/src/guide/measure.md +++ b/docs/src/guide/measure.md @@ -342,12 +342,12 @@ tmodel = transformation_model(model); # output A JuMP Model -Minimization problem with: -Variables: 3 -Objective function type: QuadExpr -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: MIN_SENSE +│ └ objective_function_type: QuadExpr +├ num_variables: 3 +├ num_constraints: 0 +└ Names registered in the model: none ``` @@ -384,12 +384,12 @@ trans_m = transformation_model(model); # output A JuMP Model -Minimization problem with: -Variables: 5 -Objective function type: QuadExpr -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: MIN_SENSE +│ └ objective_function_type: QuadExpr +├ num_variables: 5 +├ num_constraints: 0 +└ Names registered in the model: none ``` Now let's look again at the number of supports, the transcription of `u`, and the @@ -442,12 +442,12 @@ tmodel = transformation_model(model); # output A JuMP Model -Minimization problem with: -Variables: 2 -Objective function type: QuadExpr -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. 
+├ solver: none +├ objective_sense: MIN_SENSE +│ └ objective_function_type: QuadExpr +├ num_variables: 2 +├ num_constraints: 0 +└ Names registered in the model: none ``` Then we get the supports are consistent for `u` and the integral: diff --git a/docs/src/guide/model.md b/docs/src/guide/model.md index 56217af1..42acb990 100644 --- a/docs/src/guide/model.md +++ b/docs/src/guide/model.md @@ -32,7 +32,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: No optimizer attached. + Solver: none Transformation built and up-to-date: false ``` Ultimately, `model` will be solved via a transformation backend. By default, @@ -52,7 +52,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: No optimizer attached. + Solver: none Transformation built and up-to-date: false ``` @@ -72,7 +72,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: Ipopt + Solver: Ipopt Transformation built and up-to-date: false ``` For completeness, the table of currently supported JuMP compatible optimizers @@ -96,7 +96,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: Ipopt + Solver: Ipopt Transformation built and up-to-date: false ``` @@ -120,11 +120,11 @@ julia> using InfiniteOpt, Ipopt julia> backend = TranscriptionBackend(Ipopt.Optimizer) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt +├ solver: Ipopt +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` We query the underlying transformation backend, transformation model, and transformation @@ -137,19 +137,19 @@ julia> using InfiniteOpt; model = 
InfiniteModel(); julia> tbackend = transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none julia> tmodel = transformation_model(model) A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none julia> data = transformation_data(model); ``` @@ -163,11 +163,11 @@ julia> set_transformation_backend(model, TranscriptionBackend(Ipopt.Optimizer)) julia> tbackend = transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt +├ solver: Ipopt +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` Again, since `TranscriptionBackend` is the default, the following models are equivalent: ```jldoctest @@ -187,7 +187,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: Ipopt + Solver: Ipopt Transformation built and up-to-date: false ``` diff --git a/docs/src/guide/optimize.md b/docs/src/guide/optimize.md index 39324924..da3167cc 100644 --- a/docs/src/guide/optimize.md +++ b/docs/src/guide/optimize.md @@ -97,27 +97,27 @@ let's extract the transformation backend from the example above in the basic usa julia> backend = transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Minimization problem with: -Variables: 11 -Objective function type: AffExpr 
-`AffExpr`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint -`AffExpr`-in-`MathOptInterface.GreaterThan{Float64}`: 10 constraints -`VariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 11 constraints -Model mode: AUTOMATIC -CachingOptimizer state: ATTACHED_OPTIMIZER -Solver name: Ipopt +├ solver: Ipopt +├ objective_sense: MIN_SENSE +│ └ objective_function_type: AffExpr +├ num_variables: 11 +├ num_constraints: 22 +│ ├ AffExpr in MOI.EqualTo{Float64}: 1 +│ ├ AffExpr in MOI.GreaterThan{Float64}: 10 +│ └ VariableRef in MOI.GreaterThan{Float64}: 11 +└ Names registered in the model: none julia> tmodel = transformation_model(model) A JuMP Model -Minimization problem with: -Variables: 11 -Objective function type: AffExpr -`AffExpr`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint -`AffExpr`-in-`MathOptInterface.GreaterThan{Float64}`: 10 constraints -`VariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 11 constraints -Model mode: AUTOMATIC -CachingOptimizer state: ATTACHED_OPTIMIZER -Solver name: Ipopt +├ solver: Ipopt +├ objective_sense: MIN_SENSE +│ └ objective_function_type: AffExpr +├ num_variables: 11 +├ num_constraints: 22 +│ ├ AffExpr in MOI.EqualTo{Float64}: 1 +│ ├ AffExpr in MOI.GreaterThan{Float64}: 10 +│ └ VariableRef in MOI.GreaterThan{Float64}: 11 +└ Names registered in the model: none ``` The `JuMP` variable(s) stored in the transformation backend that correspond to a diff --git a/docs/src/guide/transcribe.md b/docs/src/guide/transcribe.md index 51793e2a..71e33871 100644 --- a/docs/src/guide/transcribe.md +++ b/docs/src/guide/transcribe.md @@ -68,16 +68,16 @@ julia> build_transformation_backend!(inf_model) julia> trans_model = transformation_model(inf_model) A JuMP Model -Minimization problem with: -Variables: 4 -Objective function type: AffExpr -`AffExpr`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint -`QuadExpr`-in-`MathOptInterface.LessThan{Float64}`: 3 constraints -`VariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 3 
constraints -`VariableRef`-in-`MathOptInterface.ZeroOne`: 1 constraint -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: MIN_SENSE +│ └ objective_function_type: AffExpr +├ num_variables: 4 +├ num_constraints: 8 +│ ├ AffExpr in MOI.EqualTo{Float64}: 1 +│ ├ QuadExpr in MOI.LessThan{Float64}: 3 +│ ├ VariableRef in MOI.GreaterThan{Float64}: 3 +│ └ VariableRef in MOI.ZeroOne: 1 +└ Names registered in the model: none julia> print(trans_model) Min 2 z + y(0.0) + y(5.0) + y(10.0) @@ -357,27 +357,27 @@ which wraps [`build_transcription_backend!`](@ref InfiniteOpt.TranscriptionOpt.b julia> backend1 = TranscriptionBackend() # make an empty backend A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none julia> build_transformation_backend!(inf_model); julia> backend2 = transformation_backend(inf_model) # generate from an InfiniteModel A TranscriptionBackend that uses a A JuMP Model -Minimization problem with: -Variables: 4 -Objective function type: AffExpr -`AffExpr`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint -`QuadExpr`-in-`MathOptInterface.LessThan{Float64}`: 3 constraints -`VariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 3 constraints -`VariableRef`-in-`MathOptInterface.ZeroOne`: 1 constraint -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. 
+├ solver: none +├ objective_sense: MIN_SENSE +│ └ objective_function_type: AffExpr +├ num_variables: 4 +├ num_constraints: 8 +│ ├ AffExpr in MOI.EqualTo{Float64}: 1 +│ ├ QuadExpr in MOI.LessThan{Float64}: 3 +│ ├ VariableRef in MOI.GreaterThan{Float64}: 3 +│ └ VariableRef in MOI.ZeroOne: 1 +└ Names registered in the model: none ``` The call to `build_transformation_backend!` is the backbone behind infinite model transformation and is what encapsulates all the methods to @@ -391,11 +391,11 @@ via [`transformation_model`](@ref): ```jldoctest transcribe; setup = :(empty!(inf_model.backend)) julia> transformation_model(inf_model) A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` Here we observe that such a model is currently empty and hasn't been populated yet. diff --git a/docs/src/tutorials/quick_start.md b/docs/src/tutorials/quick_start.md index 1314ab8b..98ce0777 100644 --- a/docs/src/tutorials/quick_start.md +++ b/docs/src/tutorials/quick_start.md @@ -72,7 +72,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: Ipopt + Solver: Ipopt Transformation built and up-to-date: false ``` Learn more about `InfiniteModel`s and optimizers on our diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index 49d6581a..a6441656 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -130,6 +130,20 @@ function TranscriptionBackend(optimizer_constructor; kwargs...) 
return InfiniteOpt.JuMPBackend{Transcription}(model, TranscriptionData()) end +# Get the solver name from MOI +# Inspired by https://github.com/jump-dev/JuMP.jl/blob/ce946b7092c45bdac916c9b531a13a5b929d45f0/src/print.jl#L281-L291 +function _try_solver_name(model) + if mode(model) != JuMP.DIRECT && + MOI.Utilities.state(backend(model)) == MOI.Utilities.NO_OPTIMIZER + return "none" + end + try + return MOI.get(backend(model), MOI.SolverName()) + catch + return "unknown" + end +end + # Printing function JuMP.show_backend_summary( io::IO, @@ -148,9 +162,7 @@ function JuMP.show_backend_summary( # TODO add approximation method info (requires InfiniteOpt refactoring) end # solver name - moi_summary = sprint(JuMP.show_backend_summary, backend.model) - solver_str = filter(startswith("Solver"), split(moi_summary, "\n"))[1] - println(io, " ", solver_str) + println(io, " Solver: ", _try_solver_name(backend.model)) return end diff --git a/src/backends.jl b/src/backends.jl index e660f0e8..7ec0524d 100644 --- a/src/backends.jl +++ b/src/backends.jl @@ -55,11 +55,11 @@ Return the underlying model used by the transformation backend. ```julia-repl julia> trans_model = transformation_model(model) A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` """ transformation_model(model::InfiniteModel) = transformation_model(model.backend) @@ -106,22 +106,22 @@ will be removed. julia> transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. 
+├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none julia> set_transformation_backend(model, TranscriptionBackend(Ipopt.Optimizer)) julia> transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt +├ solver: Ipopt +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` """ function set_transformation_backend( @@ -145,11 +145,11 @@ Retrieve the transformation backend used by the `model`. julia> transformation_backend(model) A TranscriptionBackend that uses a A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. +├ solver: none +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` """ function transformation_backend(model::InfiniteModel) @@ -664,11 +664,11 @@ julia> set_optimizer(model, HiGHS.Optimizer) julia> transformation_model(model) A JuMP Model -Feasibility problem with: -Variables: 0 -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: HiGHS +├ solver: HiGHS +├ objective_sense: FEASIBILITY_SENSE +├ num_variables: 0 +├ num_constraints: 0 +└ Names registered in the model: none ``` """ function JuMP.set_optimizer( diff --git a/src/constraints.jl b/src/constraints.jl index 8efc96f7..ebe777c4 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -11,18 +11,21 @@ Extend `JuMP.owner_model` to return the infinite model associated with `cref`. 
julia> model = owner_model(cref) An InfiniteOpt Model Minimization problem with: -Finite Parameters: 0 -Infinite Parameters: 3 -Variables: 3 -Derivatives: 0 -Measures: 0 -Objective function type: GeneralVariableRef -`GenericAffExpr{Float64,GeneralVariableRef}`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint -Names registered in the model: g, t, h, x -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. + Finite parameters: 0 + Infinite parameter: 1 + Variables: 3 + Derivatives: 0 + Measures: 0 + Objective function type: GenericAffExpr{Float64, GeneralVariableRef} + `GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.GreaterThan{Float64}`: 1 constraint + `GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint + `GeneralVariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 3 constraints +Names registered in the model: c1, c2, t, y, z +Transformation backend information: + Backend type: TranscriptionBackend + `t` transcribed over 10 supports + Solver: Ipopt + Transformation built and up-to-date: true ``` """ JuMP.owner_model(cref::InfOptConstraintRef) = cref.model diff --git a/src/datatypes.jl b/src/datatypes.jl index 3ccc6b6c..7eaef9eb 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -1430,7 +1430,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: No optimizer attached. 
+ Solver: none Transformation built and up-to-date: false julia> model = InfiniteModel(Ipopt.Optimizer) @@ -1443,7 +1443,7 @@ Feasibility problem with: Measures: 0 Transformation backend information: Backend type: TranscriptionBackend - Solver name: Ipopt + Solver: Ipopt Transformation built and up-to-date: false ``` """ diff --git a/src/general_variables.jl b/src/general_variables.jl index 8076c477..c926fea5 100644 --- a/src/general_variables.jl +++ b/src/general_variables.jl @@ -81,19 +81,22 @@ Extend `JuMP.owner_model` to return the model where `vref` is stored. ```julia-repl julia> owner_model(vref) An InfiniteOpt Model -Feasibility problem with: -Finite Parameters: 0 -Infinite Parameters: 0 -Variable: 1 -Derivatives: 0 -Measures: 0 -`FiniteVariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 1 constraint -`FiniteVariableRef`-in-`MathOptInterface.LessThan{Float64}`: 1 constraint -Names registered in the model: vref -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. 
+Minimization problem with: + Finite parameters: 0 + Infinite parameter: 1 + Variables: 3 + Derivatives: 0 + Measures: 0 + Objective function type: GenericAffExpr{Float64, GeneralVariableRef} + `GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.GreaterThan{Float64}`: 1 constraint + `GenericAffExpr{Float64, GeneralVariableRef}`-in-`MathOptInterface.EqualTo{Float64}`: 1 constraint + `GeneralVariableRef`-in-`MathOptInterface.GreaterThan{Float64}`: 3 constraints +Names registered in the model: c1, c2, t, y, z +Transformation backend information: + Backend type: TranscriptionBackend + `t` transcribed over 10 supports + Solver: Ipopt + Transformation built and up-to-date: true ``` """ function JuMP.owner_model(vref::GeneralVariableRef)::InfiniteModel diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index 40f85f2c..9ab47d73 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -23,7 +23,15 @@ end # test JuMP.show_backend_summary @testset "JuMP.show_backend_summary" begin expected = " Backend type: TranscriptionBackend\n `t` transcribed over 4 supports\n " * - "`x` transcribed over 5 supports\n Solver name: No optimizer attached.\n" + "`x` transcribed over 5 supports\n Solver: none\n" + @test sprint(show_backend_summary, m, tb) == expected + set_optimizer(tb.model, () -> MOIU.MockOptimizer(MOIU.Model{Float64}())) + expected = " Backend type: TranscriptionBackend\n `t` transcribed over 4 supports\n " * + "`x` transcribed over 5 supports\n Solver: Mock\n" + @test sprint(show_backend_summary, m, tb) == expected + set_optimizer(tb.model, () -> MOIU.Model{Float64}()) + expected = " Backend type: TranscriptionBackend\n `t` transcribed over 4 supports\n " * + "`x` transcribed over 5 supports\n Solver: unknown\n" + @test sprint(show_backend_summary, m, tb) == expected end # test show