From 2490bad13fae150d651a8c4d4d8504fbdf05018b Mon Sep 17 00:00:00 2001 From: pulsipher Date: Sun, 14 Jul 2024 01:38:47 -0400 Subject: [PATCH 1/6] remove ndarray keyword argument --- src/TranscriptionOpt/model.jl | 523 ++++++++++------------------- src/TranscriptionOpt/transcribe.jl | 249 +++++++------- 2 files changed, 304 insertions(+), 468 deletions(-) diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index e47d880f..1fcb72d3 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -9,49 +9,13 @@ that has been transcribed to a regular `JuMP.Model` that contains the transcribed variables. This is stored in the `data` field of [`InfiniteOpt.JuMPBackend`](@ref) to make what is called a `TranscriptionBackend` via the [`TranscriptionBackend`](@ref) constructor. - -**Fields** -- `infvar_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}`: - A lookup table of infinite variable transcriptions via support value. -- `infvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.VariableRef}}`: - Map infinite variables to their transcription variables. -- `infvar_supports::Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}}`: - Map infinite variables to their support values. -- `infvar_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}}`: - Map the infinite variables to their support labels. -- `finvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef}`: - Map finite variables to their transcription variables. -- `semi_infinite_vars::Vector{InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef}}`: - Store the core semi-infinite variable objects of semi-infinite variables formed on transcription. -- `semi_lookup::Dict{Tuple{InfiniteOpt.GeneralVariableRef, Dict{Int, Float64}}, InfiniteOpt.GeneralVariableRef}`: - Lookup which semi-infinite variables have already been added. -- `last_point_index::Int`: The last internal point variable index added. 
-- `point_lookup::Dict{Tuple{InfiniteOpt.GeneralVariableRef, Vector{Float64}}, InfiniteOpt.GeneralVariableRef}`: - Lookup which point variables have already been created internally. -- `measure_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}`: - A lookup table of measure transcriptions via support value. -- `measure_mappings::Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.AbstractJuMPScalar}}`: - Map measures to transcription expressions. -- `measure_supports::Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}}`: - Map measures to their supports values (if the transcribed measure is still infinite). -- `measure_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}}`: - Map measures to their support labels if they have any. -- `constr_mappings::Dict{InfiniteOpt.InfOptConstraintRef, Vector{JuMP.ConstraintRef}}`: - Map constraints to their transcriptions. -- `constr_supports::Dict{InfiniteOpt.InfOptConstraintRef, Vector{Tuple}}`: - Map constraints to their support values. -- `constr_support_labels::Dict{InfiniteOpt.InfOptConstraintRef, Vector{Set{DataType}}}`: - Map constraints to their support labels. -- `supports::Tuple`: Store the collected parameter supports here. -- `support_labels::Tuple`: Store the collected parameter labels here. -- `has_internal_supports::Bool`: Where any internal supports collected? 
""" mutable struct TranscriptionData # Variable information - infvar_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}} - infvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.VariableRef}} - infvar_supports::Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}} - infvar_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}} + infvar_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, JuMP.VariableRef}} + infvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.VariableRef}} + infvar_supports::Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}} + infvar_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}} finvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef} # Internal variables (created via internal measure expansions) @@ -62,17 +26,17 @@ mutable struct TranscriptionData # Measure information measure_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}} - measure_mappings::Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.AbstractJuMPScalar}} - measure_supports::Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}} - measure_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}} + measure_mappings::Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.AbstractJuMPScalar}} + measure_supports::Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}} + measure_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}} # Constraint information constr_mappings::Dict{InfiniteOpt.InfOptConstraintRef, - Vector{JuMP.ConstraintRef}} + Array{JuMP.ConstraintRef}} constr_supports::Dict{InfiniteOpt.InfOptConstraintRef, - Vector{Tuple}} + Array{Tuple}} constr_support_labels::Dict{InfiniteOpt.InfOptConstraintRef, - Vector{Set{DataType}}} + Array{Set{DataType}}} # Collected Supports supports::Tuple @@ -81,31 +45,32 @@ mutable struct TranscriptionData # Default constructor function TranscriptionData() - return new( # variable 
info - Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.VariableRef}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}}(), - Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef}(), - # internal variables - Vector{InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef}}(), - Dict{Tuple{InfiniteOpt.GeneralVariableRef, Dict{Int, Float64}}, InfiniteOpt.GeneralVariableRef}(), - 0, - Dict{Tuple{InfiniteOpt.GeneralVariableRef, Vector{Float64}}, InfiniteOpt.GeneralVariableRef}(), - # measure info - Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{JuMP.AbstractJuMPScalar}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{Tuple}}(), - Dict{InfiniteOpt.GeneralVariableRef, Vector{Set{DataType}}}(), - # constraint info - Dict{InfiniteOpt.InfOptConstraintRef, Vector{JuMP.ConstraintRef}}(), - Dict{InfiniteOpt.InfOptConstraintRef, Vector{Vector{Float64}}}(), - Dict{InfiniteOpt.InfOptConstraintRef, Vector{Set{DataType}}}(), - # support storage - (), - (), - false - ) + return new( + # variable info + Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, JuMP.VariableRef}}(), + Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.VariableRef}}(), + Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}}(), + Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}}(), + Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef}(), + # internal variables + Vector{InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef}}(), + Dict{Tuple{InfiniteOpt.GeneralVariableRef, Dict{Int, Float64}}, InfiniteOpt.GeneralVariableRef}(), + 0, + Dict{Tuple{InfiniteOpt.GeneralVariableRef, Vector{Float64}}, InfiniteOpt.GeneralVariableRef}(), + # measure info + Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}(), + Dict{InfiniteOpt.GeneralVariableRef, 
Array{JuMP.AbstractJuMPScalar}}(), + Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}}(), + Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}}(), + # constraint info + Dict{InfiniteOpt.InfOptConstraintRef, Array{JuMP.ConstraintRef}}(), + Dict{InfiniteOpt.InfOptConstraintRef, Array{Tuple}}(), + Dict{InfiniteOpt.InfOptConstraintRef, Array{Set{DataType}}}(), + # support storage + (), + (), + false, + ) end end @@ -233,12 +198,29 @@ function _ignore_label( label == InfiniteOpt.PublicLabel) end +## truncate a collection according to a label +# 0-Array +function _truncate_by_label(arr::Array{T, 0}, labels, label) where {T} + return labels[] <:label ? arr : arr[[]] +end + +# Vector +function _truncate_by_label(arr::Vector, labels, label) + inds = map(s -> any(l -> l <: label, s), labels) + return arr[inds] +end + +# Array +function _truncate_by_label(arr::Array{T, N}, labels, label) where {T, N} + firsts = (labels[(j == i ? Colon() : 1 for j in 1:N)...] for j in 1:N) + return arr[(findall(s -> any(l -> l <: label, s), sets) for sets in firsts)...] +end + """ transcription_variable( vref::InfiniteOpt.GeneralVariableRef, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Return the transcribed variable reference(s) corresponding to `vref`. Errors @@ -246,17 +228,14 @@ if no transcription variable is found. Also can query via the syntax: ```julia transcription_variable( vref::InfiniteOpt.GeneralVariableRef; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) ``` If the infinite model contains a built `TranscriptionBackend`. By default, this method returns only transcribed variables associated with public supports. All the variables can be returned by setting `label = All`. 
-If `vref` is infinite and `ndarray = true` then an n-dimensional array will be -returned in accordance with the infinite parameters that have unique object -numbers. In this case, `label` will be used to search the intersection of variable +If `vref` is infinite, then `label` will be used to search the intersection of variable supports that use the label. This is defers from the default behavior which considers the union. @@ -264,16 +243,16 @@ considers the union. ```julia-repl julia> transcription_variable(infvar, trans_backend) 2-element Array{VariableRef,1}: - infvar(support: 1) - infvar(support: 2) + infvar[1] + infvar[2] julia> transcription_variable(hdvar, trans_backend) hdvar julia> transcription_variable(infvar) 2-element Array{VariableRef,1}: - infvar(support: 1) - infvar(support: 2) + infvar[1] + infvar[2] julia> transcription_variable(hdvar) hdvar @@ -282,15 +261,13 @@ hdvar function transcription_variable( vref::InfiniteOpt.GeneralVariableRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) return transcription_variable( vref, InfiniteOpt._index_type(vref), backend, - label, - ndarray + label ) end @@ -311,8 +288,7 @@ function transcription_variable( vref::InfiniteOpt.GeneralVariableRef, ::Type{V}, backend::TranscriptionBackend, - label::Type{<:InfiniteOpt.AbstractSupportLabel}, - ndarray::Bool + label::Type{<:InfiniteOpt.AbstractSupportLabel} ) where {V <: FinVarIndex} var = get(transcription_data(backend).finvar_mappings, vref, nothing) if isnothing(var) @@ -326,21 +302,17 @@ function transcription_variable( vref::InfiniteOpt.GeneralVariableRef, ::Type{V}, backend::TranscriptionBackend, - label::Type{<:InfiniteOpt.AbstractSupportLabel}, - ndarray::Bool + label::Type{<:InfiniteOpt.AbstractSupportLabel} ) where {V <: InfVarIndex} vars = get(transcription_data(backend).infvar_mappings, vref, 
nothing) if isnothing(vars) error("Variable reference $vref not used in transcription backend.") end - if ndarray - return make_ndarray(backend, vref, vars, label) - elseif _ignore_label(backend, label) + if _ignore_label(backend, label) return vars else labels = transcription_data(backend).infvar_support_labels[vref] - inds = map(s -> any(l -> l <: label, s), labels) - return vars[inds] + return _truncate_by_label(vars, labels, label) end end @@ -349,28 +321,26 @@ function transcription_variable( fref::InfiniteOpt.GeneralVariableRef, ::Type{InfiniteOpt.ParameterFunctionIndex}, backend::TranscriptionBackend, - label::Type{<:InfiniteOpt.AbstractSupportLabel}, - ndarray::Bool + label::Type{<:InfiniteOpt.AbstractSupportLabel} ) # get the parameter group integer indices of the expression and form the support iterator - group_int_idxs = InfiniteOpt.parameter_group_int_indices(fref) - support_indices = support_index_iterator(backend, group_int_idxs) - vals = Vector{Float64}(undef, length(support_indices)) - check_labels = length(vals) > 1 && !_ignore_label(backend, label) - label_inds = ones(Bool, length(vals)) + group_idxs = InfiniteOpt.parameter_group_int_indices(fref) + support_indices = support_index_iterator(backend, group_idxs) + dims = size(support_indices)[group_idxs] + vals = Array{Float64, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) # iterate over the indices and compute the values - for (i, idx) in enumerate(support_indices) + for idx in support_indices supp = index_to_support(backend, idx) - if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) - @inbounds label_inds[i] = false - end - @inbounds vals[i] = transcription_expression(fref, backend, supp) + val_idx = idx.I[group_idxs] + @inbounds labels[val_idx...] = index_to_labels(backend, idx) + @inbounds vals[val_idx...] 
= transcription_expression(fref, backend, supp) end # return the values - if ndarray - return make_ndarray(backend, fref, vals, label) + if _ignore_label(backend, label) + return vals else - return vals[label_inds] + return _truncate_by_label(vals, labels, label) end end @@ -379,8 +349,7 @@ function transcription_variable( vref::InfiniteOpt.GeneralVariableRef, index_type, backend::TranscriptionBackend, - label, - ndarray + label ) error("`transcription_variable` not defined for variables with indices of " * "type $(index_type) and/or is not defined for labels of type $(label).") @@ -389,14 +358,12 @@ end # Dispatch for internal backends function transcription_variable( vref::InfiniteOpt.GeneralVariableRef; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) return transcription_variable( vref, JuMP.owner_model(vref).backend, - label = label, - ndarray = ndarray + label = label ) end @@ -404,8 +371,7 @@ end InfiniteOpt.transformation_variable( vref::InfiniteOpt.GeneralVariableRef, [backend::TranscriptionBackend]; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Proper extension of [`InfiniteOpt.transformation_variable`](@ref) for @@ -414,23 +380,20 @@ Proper extension of [`InfiniteOpt.transformation_variable`](@ref) for function InfiniteOpt.transformation_variable( vref::InfiniteOpt.GeneralVariableRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) - return transcription_variable(vref, backend, label = label, ndarray = ndarray) + return transcription_variable(vref, backend, label = label) end """ InfiniteOpt.variable_supports( 
vref::InfiniteOpt.DecisionVariableRef, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Return the support alias mapping associated with `vref` in the transcription backend. -Errors if `vref` does not have transcripted variables. See `transcription_variable` -for an explanation of `ndarray`. +Errors if `vref` does not have transcripted variables. """ function InfiniteOpt.variable_supports( dvref::Union{ @@ -439,60 +402,45 @@ function InfiniteOpt.variable_supports( InfiniteOpt.DerivativeRef }, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) vref = InfiniteOpt.GeneralVariableRef(JuMP.owner_model(dvref), JuMP.index(dvref)) if !haskey(transcription_data(backend).infvar_mappings, vref) error("Variable reference $vref not used in transcription backend.") - elseif !haskey(transcription_data(backend).infvar_supports, vref) - prefs = InfiniteOpt.raw_parameter_refs(dvref) - lookups = transcription_data(backend).infvar_lookup[vref] - type = typeof(Tuple(first(keys(lookups)), prefs)) - supps = Vector{type}(undef, length(lookups)) - for (s, i) in lookups - supps[i] = Tuple(s, prefs) - end - transcription_data(backend).infvar_supports[vref] = supps end supps = transcription_data(backend).infvar_supports[vref] - if ndarray - return make_ndarray(backend, dvref, supps, label) - elseif _ignore_label(backend, label) - return supps - else + if _ignore_label(backend, label) + return vals + else labels = transcription_data(backend).infvar_support_labels[vref] - inds = map(s -> any(l -> l <: label, s), labels) - return supps[inds] + return _truncate_by_label(supps, labels, label) end end # ParameterFunctionRef function InfiniteOpt.variable_supports( - 
dvref::InfiniteOpt.ParameterFunctionRef, + fref::InfiniteOpt.ParameterFunctionRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) # get the parameter group integer indices of the expression and form the support iterator - group_int_idxs = sort(InfiniteOpt.parameter_group_int_indices(dvref)) - support_indices = support_index_iterator(backend, group_int_idxs) - supps = Vector{Tuple}(undef, length(support_indices)) - check_labels = length(supps) > 1 && !_ignore_label(backend, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(fref) + support_indices = support_index_iterator(backend, group_idxs) + dims = size(support_indices)[group_idxs] + supps = Array{Tuple, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) param_supps = parameter_supports(backend) - label_inds = ones(Bool, length(supps)) # iterate over the indices and compute the values - for (i, idx) in enumerate(support_indices) - if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) - @inbounds label_inds[i] = false - end - @inbounds supps[i] = Tuple(param_supps[j][idx[j]] for j in group_int_idxs) + for idx in support_indices + val_idx = idx.I[group_idxs] + @inbounds labels[val_idx...] = index_to_labels(backend, idx) + @inbounds supps[val_idx...] 
= Tuple(param_supps[j][idx[j]] for j in group_idxs) end - # return the supports - if ndarray - return make_ndarray(backend, dvref, supps, label) + # return the values + if _ignore_label(backend, label) + return vals else - return supps[label_inds] + return _truncate_by_label(supps, labels, label) end end @@ -533,8 +481,7 @@ function lookup_by_support( if !haskey(transcription_data(backend).infvar_lookup, vref) error("Variable reference $vref not used in transcription backend.") end - idx = get(_supp_error, transcription_data(backend).infvar_lookup[vref], support) - return transcription_data(backend).infvar_mappings[vref][idx] + return get(_supp_error, transcription_data(backend).infvar_lookup[vref], support) end # ParameterFunctionIndex @@ -545,7 +492,6 @@ function lookup_by_support( support::Vector ) prefs = InfiniteOpt.raw_parameter_refs(fref) - func = InfiniteOpt.raw_function(fref) return InfiniteOpt.call_function(fref, Tuple(support, prefs)...) end @@ -595,21 +541,17 @@ function transcription_variable( mref::InfiniteOpt.GeneralVariableRef, ::Type{InfiniteOpt.MeasureIndex}, backend::TranscriptionBackend, - label::Type{<:InfiniteOpt.AbstractSupportLabel}, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} ) exprs = get(transcription_data(backend).measure_mappings, mref, nothing) if isnothing(exprs) error("Measure reference $mref not used in transcription backend.") end - if ndarray - return make_ndarray(backend, mref, exprs, label) - elseif length(exprs) > 1 && _ignore_label(backend, label) + if length(exprs) > 1 && _ignore_label(backend, label) return exprs elseif length(exprs) > 1 labels = transcription_data(backend).measure_support_labels[mref] - inds = map(s -> any(l -> l <: label, s), labels) - return exprs[inds] + return _truncate_by_label(exprs, labels, label) else return first(exprs) end @@ -633,32 +575,18 @@ end function InfiniteOpt.variable_supports( dmref::InfiniteOpt.MeasureRef, backend::TranscriptionBackend; - 
label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) mref = InfiniteOpt.GeneralVariableRef(JuMP.owner_model(dmref), JuMP.index(dmref)) if !haskey(transcription_data(backend).measure_mappings, mref) error("Measure reference $mref not used in transcription backend.") - elseif !haskey(transcription_data(backend).measure_supports, mref) - lookups = transcription_data(backend).measure_lookup[mref] - prefs = InfiniteOpt.parameter_refs(dmref) - vt_prefs = InfiniteOpt.Collections.VectorTuple(prefs) - type = typeof(Tuple(first(keys(lookups)), vt_prefs)) - supps = Vector{type}(undef, length(lookups)) - for (supp, i) in lookups - supps[i] = Tuple(supp, vt_prefs) - end - transcription_data(backend).measure_supports[mref] = supps end supps = transcription_data(backend).measure_supports[mref] - if ndarray - return make_ndarray(backend, dmref, supps, label) - elseif length(supps) > 1 && _ignore_label(backend, label) + if length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 labels = transcription_data(backend).measure_support_labels[mref] - inds = map(s -> any(l -> l <: label, s), labels) - return supps[inds] + return _truncate_by_label(supps, labels, label) else return first(supps) end @@ -671,8 +599,7 @@ end transcription_expression( expr::JuMP.AbstractJuMPScalar, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Return the transcribed expression(s) corresponding to `expr`. Errors @@ -680,80 +607,70 @@ if `expr` cannot be transcribed. 
Also can query via the syntax: ```julia transcription_expression( expr::JuMP.AbstractJuMPScalar; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) ``` If the infinite model contains a built transcription backend. By default, this method returns only transcribed expressions associated with public supports. All the expressions can be returned by setting `label = All`. -If `expr` is infinite and `ndarray = true` then an n-dimensional array will be -returned in accordance with the infinite parameters that have unique object -numbers. In this case, `label` will be used to search the intersection of the +If `expr` is infinite, then `label` will be used to search the intersection of the supports that use the label. This is defers from the default behavior which considers the union. **Example** ```julia-repl julia> transcription_expression(my_expr, backend) -x(support: 1) - y +x[1] - y julia> transcription_expression(my_expr) -x(support: 1) - y +x[1] - y ``` """ function transcription_expression( expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) # get the parameter group integer indices of the expression and form the support iterator - group_int_idxs = InfiniteOpt.parameter_group_int_indices(expr) - support_indices = support_index_iterator(backend, group_int_idxs) - exprs = Vector{JuMP.AbstractJuMPScalar}(undef, length(support_indices)) - check_labels = length(exprs) > 1 && !_ignore_label(backend, label) - label_inds = ones(Bool, length(exprs)) + group_idxs = InfiniteOpt.parameter_group_int_indices(expr) + support_indices = support_index_iterator(backend, group_idxs) + dims = 
size(support_indices)[group_idxs] + exprs = Array{JuMP.AbstractJuMPScalar, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) # iterate over the indices and compute the values - for (i, idx) in enumerate(support_indices) + for idx in support_indices supp = index_to_support(backend, idx) - if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) - @inbounds label_inds[i] = false - end - @inbounds exprs[i] = transcription_expression(expr, backend, supp) + expr_idx = idx.I[group_idxs] + @inbounds labels[expr_idx...] = index_to_labels(backend, idx) + @inbounds exprs[expr_idx...] = transcription_expression(expr, backend, supp) end - # return the expressions - if ndarray - return make_ndarray(backend, expr, exprs, label) - else - exprs = exprs[label_inds] - return length(support_indices) > 1 ? exprs : first(exprs) + # return the values + if !_ignore_label(backend, label) + exprs = _truncate_by_label(exprs, labels, label) end + return length(support_indices) > 1 ? 
exprs : first(exprs) end # Define for variables function transcription_expression( vref::InfiniteOpt.GeneralVariableRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false) - return transcription_variable(vref, backend, label = label, ndarray = ndarray) + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel) + return transcription_variable(vref, backend, label = label) end # Dispatch for internal backends function transcription_expression( expr::JuMP.AbstractJuMPScalar; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) model = JuMP.owner_model(expr) isnothing(model) && return zero(JuMP.AffExpr) + JuMP.constant(expr) return transcription_expression( expr, model.backend, - label = label, - ndarray = ndarray + label = label ) end @@ -771,18 +688,16 @@ Proper extension of [`InfiniteOpt.transformation_expression`](@ref) for function InfiniteOpt.transformation_expression( expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) - return transcription_expression(expr, backend, label = label, ndarray = ndarray) + return transcription_expression(expr, backend, label = label) end """ InfiniteOpt.expression_supports( expr::JuMP.AbstractJuMPScalar, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Return the support alias mappings associated with `expr`. Errors if `expr` cannot @@ -791,30 +706,27 @@ be transcribed. 
function InfiniteOpt.expression_supports( expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr, JuMP.GenericNonlinearExpr}, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) # get the parameter group integer indices of the expression and form the support iterator - group_int_idxs = sort(InfiniteOpt.parameter_group_int_indices(expr)) - support_indices = support_index_iterator(backend, group_int_idxs) - supps = Vector{Tuple}(undef, length(support_indices)) - check_labels = length(supps) > 1 && !_ignore_label(backend, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(expr) + support_indices = support_index_iterator(backend, group_idxs) + dims = size(support_indices)[group_idxs] + supps = Array{Tuple, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) param_supps = parameter_supports(backend) - label_inds = ones(Bool, length(supps)) # iterate over the indices and compute the values - for (i, idx) in enumerate(support_indices) - if check_labels && !any(l -> l <: label, index_to_labels(backend, idx)) - @inbounds label_inds[i] = false - end - @inbounds supps[i] = Tuple(param_supps[j][idx[j]] for j in group_int_idxs) + for idx in support_indices + supp = index_to_support(backend, idx) + expr_idx = idx.I[group_idxs] + @inbounds labels[expr_idx...] = index_to_labels(backend, idx) + @inbounds supps[expr_idx...] = Tuple(param_supps[j][idx[j]] for j in group_int_idxs) end - # return the supports - if ndarray - return make_ndarray(backend, expr, supps, label) - else - supps = supps[label_inds] - return length(support_indices) > 1 ? supps : first(supps) + # return the values + if !_ignore_label(backend, label) + supps = _truncate_by_label(supps, labels, label) end + return length(support_indices) > 1 ? 
supps : first(supps) end ################################################################################ @@ -824,8 +736,7 @@ end transcription_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Return the transcribed constraint reference(s) corresponding to `cref`. Errors @@ -833,47 +744,40 @@ if `cref` has not been transcribed. Also can query via the syntax: ```julia transcription_constraint( cref::InfiniteOpt.InfOptConstraintRef; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) ``` If the infinite model contains a built transcription backend. By default, this method returns only transcribed constraints associated with public supports. All the constraints can be returned by setting `label = All`. -If `cref` is infinite and `ndarray = true` then an n-dimensional array will be -returned in accordance with the infinite parameters that have unique object -numbers. In this case, `label` will be used to search the intersection of the +If `cref` is infinite, then `label` will be used to search the intersection of the supports that use the label. This is defers from the default behavior which considers the union. 
**Example** ```julia-repl julia> transcription_constraint(fin_con, backend) -fin_con : x(support: 1) - y <= 3.0 +fin_con : x[1] - y <= 3.0 julia> transcription_constraint(fin_con) -fin_con : x(support: 1) - y <= 3.0 +fin_con : x[1] - y <= 3.0 ``` """ function transcription_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) constr = get(transcription_data(backend).constr_mappings, cref, nothing) if isnothing(constr) error("Constraint reference $cref not used in transcription backend.") end - if ndarray - return make_ndarray(backend, cref, constr, label) - elseif length(constr) > 1 && _ignore_label(backend, label) + if length(constr) > 1 && _ignore_label(backend, label) return constr elseif length(constr) > 1 labels = transcription_data(backend).constr_support_labels[cref] - inds = map(s -> any(l -> l <: label, s), labels) - return constr[inds] + return _truncate_by_label(constr, labels, label) else return first(constr) end @@ -882,14 +786,12 @@ end # Dispatch for internal backends function transcription_constraint( cref::InfiniteOpt.InfOptConstraintRef; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) return transcription_constraint( cref, JuMP.owner_model(cref).backend, - label = label, - ndarray = ndarray + label = label ) end @@ -897,8 +799,7 @@ end InfiniteOpt.transformation_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Proper extension of [`InfiniteOpt.transformation_constraint`](@ref) for @@ -907,18 +808,16 @@ 
Proper extension of [`InfiniteOpt.transformation_constraint`](@ref) for function InfiniteOpt.transformation_constraint( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) - return transcription_constraint(cref, backend, label = label, ndarray = ndarray) + return transcription_constraint(cref, backend, label = label) end """ InfiniteOpt.constraint_supports( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false]) + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel]) Return the support alias mappings associated with `cref`. Errors if `cref` is not transcribed. @@ -926,21 +825,17 @@ not transcribed. function InfiniteOpt.constraint_supports( cref::InfiniteOpt.InfOptConstraintRef, backend::TranscriptionBackend; - label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false + label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel ) supps = get(transcription_data(backend).constr_supports, cref, nothing) if isnothing(supps) error("Constraint reference $cref not used in transcription backend.") end - if ndarray - return make_ndarray(backend, cref, supps, label) - elseif length(supps) > 1 && _ignore_label(backend, label) + if length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 labels = transcription_data(backend).constr_support_labels[cref] - inds = map(s -> any(l -> l <: label, s), labels) - return supps[inds] + return _truncate_by_label(supps, labels, label) else return first(supps) end @@ -999,7 +894,7 @@ using placeholder `NaN`s as appropriate for tuple elements that are unneeded. 
function index_to_support( backend::TranscriptionBackend, index::CartesianIndex - )::Vector{Float64} + ) raw_supps = parameter_supports(backend) return [j for i in eachindex(index.I) for j in raw_supps[i][index[i]]] end @@ -1021,73 +916,3 @@ function index_to_labels( end return labels end - -################################################################################ -# QUERY FORMATERS -################################################################################ -# Helper function for getting the array type T -function _get_array_type(array::Array{T, N}) where {T, N} - return T -end - -## Helper functions to consistently get parameter group integer indices -# Fallback -function _getparameter_group_int_indices(ref) - return InfiniteOpt.parameter_group_int_indices(ref) -end - -# Expressions -function _getparameter_group_int_indices( - expr::Union{JuMP.GenericAffExpr, JuMP.GenericQuadExpr} - ) - return sort(InfiniteOpt.parameter_group_int_indices(expr)) -end - -""" - make_narray( - backend::TranscriptionBackend, - ref::Union{JuMP.AbstractJuMPScalar, InfiniteOpt.InfOptConstraintRef}, - info::Vector, - label::Type{<:InfiniteOpt.AbstractSupportLabel} - )::Array - -Take the results `info` associated with `ref` and rearrange them into an -n-dimensional array where the axes correspond to the infinite parameter dependencies -in accordance with their creation. Note that this works by querying the object -numbers. Thus, independent infinite parameters will each get their own dimension -(even if they are defined at the same time in an array) and each dependent infinite -parameter group will have its own dimension. 
-""" -function make_ndarray(backend::TranscriptionBackend, ref, info::Vector, label::DataType) - # get the parameter group integer indices - group_int_idxs = _getparameter_group_int_indices(ref) - # return result if it is from a finite object - if isempty(group_int_idxs) - return info - end - # determine the dimensions of the new array - raw_supps = parameter_supports(backend) - dims = Tuple(length(raw_supps[i]) - 1 for i in eachindex(raw_supps) if i in group_int_idxs) - # check that the lengths match (otherwise we'll have some sparse set) - # TODO add capability to avoid this problem (make reduced array by looking at the supports) - if length(info) != prod(dims) - error("Unable to make `ndarray`. This is likely due to the object being " * - "over a portion of the infinite-domain (e.g., bounded constraints and " * - "certain semi-infinite variables.") - end - # make and populate the array - narray = Array{_get_array_type(info)}(undef, dims) - for (i, idx) in enumerate(eachindex(narray)) - narray[idx] = info[i] - end - # rearrange the array as needed to match the object number order - sorted_array = issorted(group_int_idxs) ? narray : permutedims(narray, sortperm(group_int_idxs)) - # consider the label specified (this will enforce the intersection of labels) - if _ignore_label(backend, label) - return sorted_array - else - labels = transcription_data(backend).support_labels[group_int_idxs] - inds = map(sets -> findall(s -> any(l -> l <: label, s), sets), labels) - return sorted_array[inds...] 
- end -end diff --git a/src/TranscriptionOpt/transcribe.jl b/src/TranscriptionOpt/transcribe.jl index 49452d7d..22ca56a3 100644 --- a/src/TranscriptionOpt/transcribe.jl +++ b/src/TranscriptionOpt/transcribe.jl @@ -1,44 +1,6 @@ ################################################################################ # SUPPORT ITERATION METHODS ################################################################################ -## Form a placeholder parameter reference given the object index -# IndependentParameterIndex -function _temp_parameter_ref( - model::InfiniteOpt.InfiniteModel, - idx::InfiniteOpt.IndependentParameterIndex - ) - return InfiniteOpt.IndependentParameterRef(model, idx) -end - -# DependentParametersIndex -function _temp_parameter_ref( - model::InfiniteOpt.InfiniteModel, - idx::InfiniteOpt.DependentParametersIndex - ) - idx = InfiniteOpt.DependentParameterIndex(idx, 1) - return InfiniteOpt.DependentParameterRef(model, idx) -end - -# Return the collected supports of an infinite parameter -function _collected_supports( - pref::Union{InfiniteOpt.IndependentParameterRef, InfiniteOpt.DependentParameterRef} - ) - supp_dict = InfiniteOpt._parameter_supports(pref) - supp_list = collect(keys(supp_dict)) - # append a placeholder NaN support at the end to be used for efficient combinatorics - return push!(supp_list, map(i -> NaN, first(supp_list))) -end - -# Return the collected support labels of an infinite parameter -function _collected_support_labels( - pref::Union{InfiniteOpt.IndependentParameterRef, InfiniteOpt.DependentParameterRef}, - supports::Vector - ) - supp_dict = InfiniteOpt._parameter_supports(pref) - default = Set{DataType}() - return map(k -> get(supp_dict, k, default), supports) -end - """ set_parameter_supports( backend::TranscriptionBackend, @@ -63,20 +25,27 @@ function set_parameter_supports( model::InfiniteOpt.InfiniteModel ) # gather the basic information - param_indices = InfiniteOpt.parameter_group_indices(model) - prefs = map(idx -> 
_temp_parameter_ref(model, idx), param_indices) + prefs = InfiniteOpt.parameter_refs(model) data = transcription_data(backend) # check and add supports to prefs as needed - for pref in prefs - InfiniteOpt.add_generative_supports(pref) - if InfiniteOpt.has_internal_supports(pref) + for group in prefs + InfiniteOpt.add_generative_supports(first(group)) + if InfiniteOpt.has_internal_supports(first(group)) data.has_internal_supports = true - end + end end # build and add the support/label tuples - supps = Tuple(_collected_supports(pref) for pref in prefs) - labels = Tuple(_collected_support_labels(pref, supps[i]) - for (i, pref) in enumerate(prefs)) + supps = Tuple(begin + supp_dict = InfiniteOpt.core_object(first(group)).supports + supp_list = collect(keys(supp_dict)) + # append a placeholder NaN support at the end to be used for efficient combinatorics + push!(supp_list, map(i -> NaN, first(supp_list))) + end for group in prefs) + labels = Tuple(begin + supp_dict = InfiniteOpt.core_object(first(group)).supports + default = Set{DataType}() + map(k -> get(supp_dict, k, default), supps[i]) + end for (i, group) in enumerate(prefs)) data.supports = supps data.support_labels = labels return @@ -150,21 +119,28 @@ function transcribe_infinite_variables!( var = object.variable base_name = object.name param_nums = var.parameter_nums + group_idxs = var.group_int_idxs + prefs = var.parameter_refs # prepare for iterating over its supports - supp_indices = support_index_iterator(backend, var.group_int_idxs) - vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) - labels = Vector{Set{DataType}}(undef, length(supp_indices)) - lookup_dict = Dict{Vector{Float64}, Int}() + supp_indices = support_index_iterator(backend, group_idxs) + dims = size(supp_indices)[group_idxs] + vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) 
+ supp_type = typeof(Tuple(ones(length(prefs)), prefs)) + supps = Array{supp_type, length(dims)}(undef, dims...) + lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() # create a variable for each support - for (counter, i) in enumerate(supp_indices) - raw_supp = index_to_support(backend, i) - supp = raw_supp[param_nums] + for i in supp_indices + supp = index_to_support(backend, i)[param_nums] info = _format_infinite_info(var, supp) - v_name = string(base_name, "(support: ", counter, ")") + var_idx = i.I[group_idxs] + v_name = string(base_name, "[", join(var_idx, ","), "]") v = JuMP.ScalarVariable(info) - @inbounds vrefs[counter] = JuMP.add_variable(backend.model, v, v_name) - lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(backend, i) + jump_vref = JuMP.add_variable(backend.model, v, v_name) + @inbounds vrefs[var_idx...] = jump_vref + lookup_dict[supp] = jump_vref + @inbounds supps[var_idx...] = Tuple(supp, prefs) + @inbounds labels[var_idx...] = index_to_labels(backend, i) end # save the transcription information ivref = InfiniteOpt.GeneralVariableRef(model, idx) @@ -172,6 +148,7 @@ function transcribe_infinite_variables!( data.infvar_lookup[ivref] = lookup_dict data.infvar_mappings[ivref] = vrefs data.infvar_support_labels[ivref] = labels + data.infvar_supports[ivref] = supps end return end @@ -195,28 +172,35 @@ end function _transcribe_derivative_variable(dref, d, backend) base_name = InfiniteOpt.variable_string(MIME("text/plain"), dispatch_variable_ref(dref)) param_nums = InfiniteOpt._parameter_numbers(d.variable_ref) - group_int_idxs = InfiniteOpt.parameter_group_int_indices(d.variable_ref) + group_idxs = InfiniteOpt.parameter_group_int_indices(d.variable_ref) + prefs = InfiniteOpt.raw_parameter_refs(dref) # prepare for iterating over its supports - supp_indices = support_index_iterator(backend, group_int_idxs) - vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) - labels = Vector{Set{DataType}}(undef, 
length(supp_indices)) - lookup_dict = Dict{Vector{Float64}, Int}() + supp_indices = support_index_iterator(backend, group_idxs) + dims = size(supp_indices)[group_idxs] + vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) + supp_type = typeof(Tuple(ones(length(prefs)), prefs)) + supps = Array{supp_type, length(dims)}(undef, dims...) + lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() # create a variable for each support - for (counter, i) in enumerate(supp_indices) - raw_supp = index_to_support(backend, i) - supp = raw_supp[param_nums] + for i in supp_indices + supp = index_to_support(backend, i)[param_nums] info = _format_derivative_info(d, supp) - d_name = string(base_name, "(support: ", counter, ")") + var_idx = i.I[group_idxs] + d_name = string(base_name, "[", join(var_idx, ","), "]") d_var = JuMP.ScalarVariable(info) - @inbounds vrefs[counter] = JuMP.add_variable(backend.model, d_var, d_name) - lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(backend, i) + jump_vref = JuMP.add_variable(backend.model, d_var, d_name) + @inbounds vrefs[var_idx...] = jump_vref + lookup_dict[supp] = jump_vref + @inbounds supps[var_idx...] = Tuple(supp, prefs) + @inbounds labels[var_idx...] 
= index_to_labels(backend, i) end # save the transcription information data = transcription_data(backend) data.infvar_lookup[dref] = lookup_dict data.infvar_mappings[dref] = vrefs data.infvar_support_labels[dref] = labels + data.infvar_supports[dref] = supps return end @@ -274,37 +258,48 @@ function _set_semi_infinite_variable_mapping( ivref = var.infinite_variable_ref ivref_param_nums = InfiniteOpt._parameter_numbers(ivref) eval_supps = var.eval_supports + group_idxs = var.group_int_idxs + prefs = InfiniteOpt.raw_parameter_refs(rvref) # prepare for iterating over its supports - supp_indices = support_index_iterator(backend, var.group_int_idxs) - vrefs = Vector{JuMP.VariableRef}(undef, length(supp_indices)) - labels = Vector{Set{DataType}}(undef, length(supp_indices)) - lookup_dict = Dict{Vector{Float64}, Int}() - counter = 1 + supp_indices = support_index_iterator(backend, group_idxs) + dims = size(supp_indices)[group_idxs] + vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) + supp_type = typeof(Tuple(ones(length(prefs)), prefs)) + supps = Array{supp_type, length(dims)}(undef, dims...) + lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() + valid_idxs = ones(Bool, dims...) # map a variable for each support for i in supp_indices raw_supp = index_to_support(backend, i) # ensure this support is valid with the reduced restriction - if any(!isnan(raw_supp[ivref_param_nums[k]]) && raw_supp[ivref_param_nums[k]] != v - for (k, v) in eval_supps) + if any(!isnan(raw_supp[ivref_param_nums[k]]) && raw_supp[ivref_param_nums[k]] != v for (k, v) in eval_supps) + valid_idxs[lin_idx] = false continue end # map to the current transcription variable supp = raw_supp[param_nums] ivref_supp = [haskey(eval_supps, j) ? 
eval_supps[j] : raw_supp[k] for (j, k) in enumerate(ivref_param_nums)] - @inbounds vrefs[counter] = lookup_by_support(ivref, backend, ivref_supp) - lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(backend, i) - counter += 1 + var_idx = i.I[group_idxs] + jump_vref = lookup_by_support(ivref, backend, ivref_supp) + @inbounds vrefs[var_idx...] = jump_vref + lookup_dict[supp] = jump_vref + @inbounds supps[var_idx...] = Tuple(supp, prefs) + @inbounds labels[var_idx...] = index_to_labels(backend, i) end - # truncate vrefs if any supports were skipped because of dependent parameter supps - deleteat!(vrefs, counter:length(vrefs)) - deleteat!(labels, counter:length(vrefs)) - # save the transcription information + # truncate vrefs if any supports were skipped because of dependent parameter supps and save data = transcription_data(backend) + if !all(valid_idxs) + data.infvar_mappings[rvref] = vrefs[valid_idxs] + data.infvar_support_labels[rvref] = labels[valid_idxs] + data.infvar_supports[rvref] = supps[valid_idxs] + else + data.infvar_mappings[rvref] = vrefs + data.infvar_support_labels[rvref] = labels + data.infvar_supports[rvref] = supps + end data.infvar_lookup[rvref] = lookup_dict - data.infvar_mappings[rvref] = vrefs - data.infvar_support_labels[rvref] = labels return end @@ -555,6 +550,9 @@ function transcribe_measures!( for (idx, object) in InfiniteOpt._data_dictionary(model, InfiniteOpt.Measure) # get the basic information meas = object.measure + group_idxs = meas.group_int_idxs + mref = InfiniteOpt.GeneralVariableRef(model, idx) + prefs = InfiniteOpt.raw_parameter_refs(mref) # expand the measure if meas.constant_func new_expr = InfiniteOpt.analytic_expansion(meas.func, meas.data, backend) @@ -562,24 +560,29 @@ function transcribe_measures!( new_expr = InfiniteOpt.expand_measure(meas.func, meas.data, backend) end # prepare to transcribe over the supports - supp_indices = support_index_iterator(backend, meas.group_int_idxs) - exprs = 
Vector{JuMP.AbstractJuMPScalar}(undef, length(supp_indices)) - labels = Vector{Set{DataType}}(undef, length(supp_indices)) + supp_indices = support_index_iterator(backend, group_idxs) + dims = size(supp_indices)[group_idxs] + exprs = Array{JuMP.AbstractJuMPScalar, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) + supp_type = typeof(Tuple(ones(length(prefs)), prefs)) + supps = Array{supp_type, length(dims)}(undef, dims...) lookup_dict = Dict{Vector{Float64}, Int}() # map a variable for each support - for (counter, i) in enumerate(supp_indices) + for (lin_idx, i) in enumerate(supp_indices) raw_supp = index_to_support(backend, i) - @inbounds exprs[counter] = transcription_expression(new_expr, backend, raw_supp) + expr_idx = i.I[group_idxs] + @inbounds exprs[expr_idx...] = transcription_expression(new_expr, backend, raw_supp) supp = raw_supp[meas.parameter_nums] - lookup_dict[supp] = counter - @inbounds labels[counter] = index_to_labels(backend, i) + lookup_dict[supp] = lin_idx + @inbounds supps[expr_idx...] = Tuple(supp, prefs) + @inbounds labels[expr_idx...] 
= index_to_labels(backend, i) end # save the transcription information - mref = InfiniteOpt.GeneralVariableRef(model, idx) data = transcription_data(backend) data.measure_lookup[mref] = lookup_dict data.measure_mappings[mref] = exprs data.measure_support_labels[mref] = labels + data.measure_supports[mref] = supps end return end @@ -755,14 +758,15 @@ function transcribe_constraints!( constr = object.constraint func = JuMP.jump_function(constr) set = JuMP.moi_set(constr) - group_int_idxs = object.group_int_idxs + group_idxs = object.group_int_idxs cref = InfiniteOpt.InfOptConstraintRef(model, idx) # prepare the iteration helpers - supp_indices = support_index_iterator(backend, group_int_idxs) - crefs = Vector{JuMP.ConstraintRef}(undef, length(supp_indices)) - supps = Vector{Tuple}(undef, length(supp_indices)) - labels = Vector{Set{DataType}}(undef, length(supp_indices)) - counter = 1 + supp_indices = support_index_iterator(backend, group_idxs) + dims = size(supp_indices)[group_idxs] + crefs = Array{JuMP.ConstraintRef, length(dims)}(undef, dims...) + supps = Array{Tuple, length(dims)}(undef, dims...) + labels = Array{Set{DataType}, length(dims)}(undef, dims...) + valid_idxs = ones(Bool, dims...) # iterate over the support indices for the info constraints if object.is_info_constraint for i in supp_indices @@ -770,12 +774,14 @@ function transcribe_constraints!( info_ref = _get_info_constr_from_var(backend, func, set, raw_supp) # not all supports may be defined if overwritten by a point variable + con_idx = i.I[group_idxs] if !isnothing(info_ref) - @inbounds crefs[counter] = info_ref - @inbounds supps[counter] = Tuple(param_supps[j][i[j]] - for j in group_int_idxs) - @inbounds labels[counter] = index_to_labels(backend, i) - counter += 1 + @inbounds crefs[con_idx...] = info_ref + @inbounds supps[con_idx...] = Tuple(param_supps[j][i[j]] + for j in group_idxs) + @inbounds labels[con_idx...] = index_to_labels(backend, i) + else + valid_idxs[con_idx...] 
= false end end # iterate over the supports for regular constraints @@ -789,28 +795,33 @@ function transcribe_constraints!( for i in supp_indices raw_supp = index_to_support(backend, i) # ensure the support satisfies parameter bounds and then add it + con_idx = i.I[group_idxs] if _support_in_restrictions(raw_supp, restrict_indices, restrict_domains) - new_name = isempty(name) ? "" : string(name, "(support: ", counter, ")") + new_name = isempty(name) ? "" : string(name, "[", join(con_idx, ","), "]") new_cref = _process_constraint(backend, constr, func, set, raw_supp, new_name) - @inbounds crefs[counter] = new_cref - @inbounds supps[counter] = Tuple(param_supps[j][i[j]] - for j in group_int_idxs) - @inbounds labels[counter] = index_to_labels(backend, i) - counter += 1 + @inbounds crefs[con_idx...] = new_cref + @inbounds supps[con_idx...] = Tuple(param_supps[j][i[j]] + for j in group_idxs) + @inbounds labels[con_idx...] = index_to_labels(backend, i) + else + valid_idxs[con_idx...] = false end end end # truncate the arrays in case not all the supports satisfied the bounds - deleteat!(crefs, counter:length(crefs)) - deleteat!(supps, counter:length(supps)) - deleteat!(labels, counter:length(supps)) - # add the constraint mappings to the trans model + # and save data = transcription_data(backend) - data.constr_mappings[cref] = crefs - data.constr_supports[cref] = supps - data.constr_support_labels[cref] = labels + if !all(valid_idxs) + data.constr_mappings[cref] = crefs[valid_idxs] + data.constr_supports[cref] = supps[valid_idxs] + data.constr_support_labels[cref] = labels[valid_idxs] + else + data.constr_mappings[cref] = crefs + data.constr_supports[cref] = supps + data.constr_support_labels[cref] = labels + end end return end From a6968579424f3378149cf07ecfde96e2e8e93fa1 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 29 Jul 2024 11:32:26 -0400 Subject: [PATCH 2/6] finalize removal of `ndarray` and fix bugs --- .../Optimal Control/pandemic_control.jl | 8 +- 
docs/src/guide/optimize.md | 78 ++-- docs/src/guide/result.md | 44 +-- docs/src/guide/transcribe.md | 171 ++++----- src/TranscriptionOpt/model.jl | 148 ++++---- src/TranscriptionOpt/transcribe.jl | 56 +-- src/backends.jl | 72 ++-- src/results.jl | 47 ++- src/semi_infinite_variables.jl | 16 +- test/TranscriptionOpt/measure.jl | 23 +- test/TranscriptionOpt/model.jl | 343 +++++++----------- test/TranscriptionOpt/transcribe.jl | 55 +-- test/backend_mappings.jl | 7 - test/results.jl | 12 +- 14 files changed, 468 insertions(+), 612 deletions(-) diff --git a/docs/src/examples/Optimal Control/pandemic_control.jl b/docs/src/examples/Optimal Control/pandemic_control.jl index 4415dbb1..5772e08b 100644 --- a/docs/src/examples/Optimal Control/pandemic_control.jl +++ b/docs/src/examples/Optimal Control/pandemic_control.jl @@ -178,10 +178,10 @@ optimize!(model) # retrieve our values using `value`. # Get the results: -r_opt = value(r, ndarray = true) * 100 # make the population fractions into percentages -s_opt = value(s, ndarray = true) * 100 -i_opt = value(i, ndarray = true) * 100 -e_opt = value(e, ndarray = true) * 100 +r_opt = value(r) * 100 # make the population fractions into percentages +s_opt = value(s) * 100 +i_opt = value(i) * 100 +e_opt = value(e) * 100 u_opt = value(u) obj_opt = objective_value(model) ts = value(t) diff --git a/docs/src/guide/optimize.md b/docs/src/guide/optimize.md index 9ec9e4b9..39324924 100644 --- a/docs/src/guide/optimize.md +++ b/docs/src/guide/optimize.md @@ -127,16 +127,16 @@ Thus, using the going example we get: ```jldoctest optimize julia> transformation_variable(y) # infinite variable 10-element Vector{VariableRef}: - y(support: 1) - y(support: 2) - y(support: 3) - y(support: 4) - y(support: 5) - y(support: 6) - y(support: 7) - y(support: 8) - y(support: 9) - y(support: 10) + y(0.0) + y(1.11111111111) + y(2.22222222222) + y(3.33333333333) + y(4.44444444444) + y(5.55555555556) + y(6.66666666667) + y(7.77777777778) + y(8.88888888889) + y(10.0) 
julia> transformation_variable(z) # finite variable z @@ -148,48 +148,40 @@ Thus, using going example we get: ```jldoctest optimize julia> transformation_constraint(c1) # infinite constraint 10-element Vector{ConstraintRef}: - c1(support: 1) : z - y(support: 1) ≥ 0 - c1(support: 2) : z - y(support: 2) ≥ 0 - c1(support: 3) : z - y(support: 3) ≥ 0 - c1(support: 4) : z - y(support: 4) ≥ 0 - c1(support: 5) : z - y(support: 5) ≥ 0 - c1(support: 6) : z - y(support: 6) ≥ 0 - c1(support: 7) : z - y(support: 7) ≥ 0 - c1(support: 8) : z - y(support: 8) ≥ 0 - c1(support: 9) : z - y(support: 9) ≥ 0 - c1(support: 10) : z - y(support: 10) ≥ 0 + c1[1] : z - y(0.0) ≥ 0 + c1[2] : z - y(1.11111111111) ≥ 0 + c1[3] : z - y(2.22222222222) ≥ 0 + c1[4] : z - y(3.33333333333) ≥ 0 + c1[5] : z - y(4.44444444444) ≥ 0 + c1[6] : z - y(5.55555555556) ≥ 0 + c1[7] : z - y(6.66666666667) ≥ 0 + c1[8] : z - y(7.77777777778) ≥ 0 + c1[9] : z - y(8.88888888889) ≥ 0 + c1[10] : z - y(10.0) ≥ 0 ``` We can also query the expressions via [`transformation_expression`](@ref transformation_expression(::JuMP.AbstractJuMPScalar)): ```jldoctest optimize julia> transformation_expression(z - y^2 + 3) # infinite expression 10-element Vector{AbstractJuMPScalar}: - -y(support: 1)² + z + 3 - -y(support: 2)² + z + 3 - -y(support: 3)² + z + 3 - -y(support: 4)² + z + 3 - -y(support: 5)² + z + 3 - -y(support: 6)² + z + 3 - -y(support: 7)² + z + 3 - -y(support: 8)² + z + 3 - -y(support: 9)² + z + 3 - -y(support: 10)² + z + 3 + -y(0.0)² + z + 3 + -y(1.11111111111)² + z + 3 + -y(2.22222222222)² + z + 3 + -y(3.33333333333)² + z + 3 + -y(4.44444444444)² + z + 3 + -y(5.55555555556)² + z + 3 + -y(6.66666666667)² + z + 3 + -y(7.77777777778)² + z + 3 + -y(8.88888888889)² + z + 3 + -y(10.0)² + z + 3 ``` !!! note - 1. 
Like `supports` the `transformation_[obj]` methods also employ the - `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by - default will return variables/expressions/constraints associated with public - supports. The full set (e.g., ones corresponding to internal collocation nodes) - is obtained via `label = All`. - 2. These methods also employ the `ndarray::Bool` keyword argument that will cause the - output to be formatted as an n-dimensional array where the dimensions - correspond to the infinite parameter dependencies. For example, if we have an - infinite variable `y(t, ξ)`, and we invoke a query method with `ndarray = true` - then we'll get a matrix whose dimensions correspond to the supports of `t` and - `ξ`, respectively. Also, if `ndarray = true` then `label` correspond to the - intersection of supports labels in contrast to its default of invoking the union - of the labels. + Like `supports` the `transformation_[obj]` methods also employ the + `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by + default will return variables/expressions/constraints associated with public + supports. The full set (e.g., ones corresponding to internal collocation nodes) + is obtained via `label = All`. The purpose of this `transformation_backend` abstraction is to readily enable user-defined reformulation extensions (e.g., using polynomial chaos expansion theory). However, diff --git a/docs/src/guide/result.md b/docs/src/guide/result.md index a00874b4..7b4de659 100644 --- a/docs/src/guide/result.md +++ b/docs/src/guide/result.md @@ -157,7 +157,7 @@ julia> dual(c1) 1.1930560126841273e-10 1.1930560126841273e-10 ``` -`c1` is an infinite constraint and thus we obtain the duals of its transcribed +`c1` is an infinite constraint, and thus we obtain the duals of its transcribed versions. 
The underlying infinite parameter(s) and support values are queried via `parameter_refs` and `supports`: ```jldoctest results @@ -181,9 +181,7 @@ These again all have a 1-to-1 correspondence. !!! note In the case that our variables/constraints depend on multiple infinite - parameter it is typically convenient to add the keyword statement - `ndarray = true` when calling any variable/constraint queries (e.g., `value` - and `dual`). This will reformat the output vector into an n-dimensional array + parameters, an n-dimensional array will typically be returned whose dimensions correspond to the supports of the infinite parameters. ## Termination Queries @@ -240,19 +238,11 @@ information. Thus, here the queries are extended to work with the specifics of the transformation backend to return the appropriate info. !!! note - 1. Like `supports` the all variable based query methods below also employ the - `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by - default will return the desired information associated with public - supports. The full set (e.g., ones corresponding to internal collocation nodes) - is obtained via `label = All`. - 2. These methods also employ the `ndarray::Bool` keyword argument that will cause the - output to be formatted as an n-dimensional array where the dimensions - correspond to the infinite parameter dependencies. For example, if we have an - infinite variable `y(t, ξ)` and we invoke a query method with `ndarray = true` - then we'll get a matrix whose dimensions correspond to the supports of `t` and - `ξ`, respectively. Also, if `ndarray = true` then `label` correspond to the - intersection of supports labels in contrast to its default of invoking the union - of the labels. + Like `supports` the all variable based query methods below also employ the + `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by + default will return the desired information associated with public + supports. 
The full set (e.g., ones corresponding to internal collocation nodes) + is obtained via `label = All`. First, we should verify that the transformed variable in fact has variable values via [`has_values`](@ref). In our example, we have: @@ -300,19 +290,11 @@ appropriate versions of [`map_optimizer_index`](@ref InfiniteOpt.map_optimizer_i Like variables, a variety of information can be queried about constraints. !!! note - 1. Like `supports`, all the constraint query methods below also employ the - `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by - default will return the desired information associated with public - supports. The full set (e.g., ones corresponding to internal collocation nodes) - is obtained via `label = All`. - 2. These methods also employ the `ndarray::Bool` keyword argument that will cause the - output to be formatted as an n-dimensional array where the dimensions - correspond to the infinite parameter dependencies. For example, if we have an - infinite constraint that depends on `t` and `ξ)`, and we invoke a query method - with `ndarray = true` then we'll get a matrix whose dimensions correspond to - the supports of `t` and `ξ`, respectively. Also, if `ndarray = true` then - `label` correspond to the intersection of supports labels in contrast to its - default of invoking the union of the labels. + Like `supports`, all the constraint query methods below also employ the + `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by + default will return the desired information associated with public + supports. The full set (e.g., ones corresponding to internal collocation nodes) + is obtained via `label = All`. 
First, recall that constraints are stored in the form `function-in-set` where generally `function` contains the variables and coefficients and the set contains @@ -438,7 +420,7 @@ julia> report[z] ``` Note that like other query methods, an array of ranges will be provided with testing the sensitivity of an infinite constraint RHS in accordance with the -discretization scheme. Also, keyword arguments (like `ndarray` and `label`) can +discretization scheme. Also, keyword arguments (like `label`) can be invoked when indexing the report: ```julia-repl julia> report[c1, label = All] diff --git a/docs/src/guide/transcribe.md b/docs/src/guide/transcribe.md index 949af16d..caeae42b 100644 --- a/docs/src/guide/transcribe.md +++ b/docs/src/guide/transcribe.md @@ -80,23 +80,21 @@ CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. julia> print(trans_model) -Min 2 z + y(support: 1) + y(support: 2) + y(support: 3) +Min 2 z + y(0.0) + y(5.0) + y(10.0) Subject to - initial(support: 1) : y(support: 1) = 1 - constr(support: 1) : y(support: 1)² - z ≤ 42 - constr(support: 2) : y(support: 2)² - z ≤ 42 - constr(support: 3) : y(support: 3)² - z ≤ 42 - y(support: 1) ≥ 0 - y(support: 2) ≥ 0 - y(support: 3) ≥ 0 + initial : y(0.0) = 1 + constr[1] : y(0.0)² - z ≤ 42 + constr[2] : y(5.0)² - z ≤ 42 + constr[3] : y(10.0)² - z ≤ 42 + y(0.0) ≥ 0 + y(5.0) ≥ 0 + y(10.0) ≥ 0 z binary -``` Thus, we have a transcribed `JuMP` model. To be precise, data on the mapping between the transcribed variables/constraints and their infinite counterparts is also generated as part of the `TranscriptionBackend` that `trans_model` is part of. 
Notice, that -multiple finite variables have been introduced to discretize `y(t)` at supports 1, 2, -and 3 which correspond to 0, 5, and 10 as can be queried by `supports`: +multiple finite variables have been introduced to discretize `y(t)` at supports 0, 5, and 10 which we can can also query via `supports`: ```jldoctest transcribe julia> supports(y) 3-element Vector{Tuple}: @@ -109,11 +107,10 @@ except the initial condition which naturally is only invoked for the first suppo point. Furthermore, the transcription variable(s) of any variable associated with the infinite model can be determined via [`transformation_variable`](@ref): ```jldoctest transcribe -julia> transformation_variable(y) 3-element Vector{VariableRef}: - y(support: 1) - y(support: 2) - y(support: 3) + y(0.0) + y(5.0) + y(10.0) julia> transformation_variable(z) z @@ -123,13 +120,13 @@ can be queried via [`transformation_constraint`](@ref) and the associated suppor and infinite parameters can be found via `supports` and `parameter_refs`: ```jldoctest transcribe julia> transformation_constraint(initial) -initial(support: 1) : y(support: 1) = 1 +initial : y(0.0) = 1 julia> transformation_constraint(constr) 3-element Vector{ConstraintRef}: - constr(support: 1) : y(support: 1)² - z ≤ 42 - constr(support: 2) : y(support: 2)² - z ≤ 42 - constr(support: 3) : y(support: 3)² - z ≤ 42 + constr[1] : y(0.0)² - z ≤ 42 + constr[2] : y(5.0)² - z ≤ 42 + constr[3] : y(10.0)² - z ≤ 42 julia> supports(constr) 3-element Vector{Tuple}: @@ -307,78 +304,43 @@ julia> build_transformation_backend!(inf_model) julia> trans_model = transformation_model(inf_model); julia> print(trans_model) -Min y(support: 1)² + y(support: 2)² + y(support: 3)² +Min y(0.0)² + y(5.0)² + y(10.0)² Subject to - y(support: 1) = 1 - g(support: 1) = 0 - g(support: 4) = 0 - g(support: 7) = 0 - g(support: 10) = 0 - ∂/∂t[g(t, x)](support: 1) + ∂/∂t[g(t, x)](support: 4) + ∂/∂t[g(t, x)](support: 7) + ∂/∂t[g(t, x)](support: 10) = 42 - ∂/∂t[g(t, x)](support: 
2) + ∂/∂t[g(t, x)](support: 5) + ∂/∂t[g(t, x)](support: 8) + ∂/∂t[g(t, x)](support: 11) = 42 - ∂/∂t[g(t, x)](support: 3) + ∂/∂t[g(t, x)](support: 6) + ∂/∂t[g(t, x)](support: 9) + ∂/∂t[g(t, x)](support: 12) = 42 - g(support: 1) - g(support: 2) + 5 ∂/∂t[g(t, x)](support: 2) = 0 - g(support: 2) - g(support: 3) + 5 ∂/∂t[g(t, x)](support: 3) = 0 - g(support: 4) - g(support: 5) + 5 ∂/∂t[g(t, x)](support: 5) = 0 - g(support: 5) - g(support: 6) + 5 ∂/∂t[g(t, x)](support: 6) = 0 - g(support: 7) - g(support: 8) + 5 ∂/∂t[g(t, x)](support: 8) = 0 - g(support: 8) - g(support: 9) + 5 ∂/∂t[g(t, x)](support: 9) = 0 - g(support: 10) - g(support: 11) + 5 ∂/∂t[g(t, x)](support: 11) = 0 - g(support: 11) - g(support: 12) + 5 ∂/∂t[g(t, x)](support: 12) = 0 - y(support: 1)² + 3 g(support: 1) ≤ 2 - y(support: 2)² + 3 g(support: 2) ≤ 2 - y(support: 3)² + 3 g(support: 3) ≤ 2 - y(support: 1)² + 3 g(support: 4) ≤ 2 - y(support: 2)² + 3 g(support: 5) ≤ 2 - y(support: 3)² + 3 g(support: 6) ≤ 2 - y(support: 1)² + 3 g(support: 7) ≤ 2 - y(support: 2)² + 3 g(support: 8) ≤ 2 - y(support: 3)² + 3 g(support: 9) ≤ 2 - y(support: 1)² + 3 g(support: 10) ≤ 2 - y(support: 2)² + 3 g(support: 11) ≤ 2 - y(support: 3)² + 3 g(support: 12) ≤ 2 + y(0.0) = 1 + g(0.0, [-1.0, -1.0]) = 0 + g(0.0, [1.0, -1.0]) = 0 + g(0.0, [-1.0, 1.0]) = 0 + g(0.0, [1.0, 1.0]) = 0 + d/dt[g(t, x)](0.0, [-1.0, -1.0]) + d/dt[g(t, x)](0.0, [1.0, -1.0]) + d/dt[g(t, x)](0.0, [-1.0, 1.0]) + d/dt[g(t, x)](0.0, [1.0, 1.0]) = 42 + d/dt[g(t, x)](5.0, [-1.0, -1.0]) + d/dt[g(t, x)](5.0, [1.0, -1.0]) + d/dt[g(t, x)](5.0, [-1.0, 1.0]) + d/dt[g(t, x)](5.0, [1.0, 1.0]) = 42 + d/dt[g(t, x)](10.0, [-1.0, -1.0]) + d/dt[g(t, x)](10.0, [1.0, -1.0]) + d/dt[g(t, x)](10.0, [-1.0, 1.0]) + d/dt[g(t, x)](10.0, [1.0, 1.0]) = 42 + g(0.0, [-1.0, -1.0]) - g(5.0, [-1.0, -1.0]) + 5 d/dt[g(t, x)](5.0, [-1.0, -1.0]) = 0 + g(5.0, [-1.0, -1.0]) - g(10.0, [-1.0, -1.0]) + 5 d/dt[g(t, x)](10.0, [-1.0, -1.0]) = 0 + g(0.0, [1.0, -1.0]) - g(5.0, [1.0, -1.0]) + 5 d/dt[g(t, 
x)](5.0, [1.0, -1.0]) = 0 + g(5.0, [1.0, -1.0]) - g(10.0, [1.0, -1.0]) + 5 d/dt[g(t, x)](10.0, [1.0, -1.0]) = 0 + g(0.0, [-1.0, 1.0]) - g(5.0, [-1.0, 1.0]) + 5 d/dt[g(t, x)](5.0, [-1.0, 1.0]) = 0 + g(5.0, [-1.0, 1.0]) - g(10.0, [-1.0, 1.0]) + 5 d/dt[g(t, x)](10.0, [-1.0, 1.0]) = 0 + g(0.0, [1.0, 1.0]) - g(5.0, [1.0, 1.0]) + 5 d/dt[g(t, x)](5.0, [1.0, 1.0]) = 0 + g(5.0, [1.0, 1.0]) - g(10.0, [1.0, 1.0]) + 5 d/dt[g(t, x)](10.0, [1.0, 1.0]) = 0 + y(0.0)² + 3 g(0.0, [-1.0, -1.0]) ≤ 2 + y(5.0)² + 3 g(5.0, [-1.0, -1.0]) ≤ 2 + y(10.0)² + 3 g(10.0, [-1.0, -1.0]) ≤ 2 + y(0.0)² + 3 g(0.0, [1.0, -1.0]) ≤ 2 + y(5.0)² + 3 g(5.0, [1.0, -1.0]) ≤ 2 + y(10.0)² + 3 g(10.0, [1.0, -1.0]) ≤ 2 + y(0.0)² + 3 g(0.0, [-1.0, 1.0]) ≤ 2 + y(5.0)² + 3 g(5.0, [-1.0, 1.0]) ≤ 2 + y(10.0)² + 3 g(10.0, [-1.0, 1.0]) ≤ 2 + y(0.0)² + 3 g(0.0, [1.0, 1.0]) ≤ 2 + y(5.0)² + 3 g(5.0, [1.0, 1.0]) ≤ 2 + y(10.0)² + 3 g(10.0, [1.0, 1.0]) ≤ 2 ``` This precisely matches what we found analytically. Note that the unique support -combinations are determined automatically and are represented visually as -`support: #`. The precise support values can be looked up via `supports`: -```jldoctest trans_example -julia> supports(y) -3-element Vector{Tuple}: - (0.0,) - (5.0,) - (10.0,) - -julia> supports(g) -12-element Vector{Tuple}: - (0.0, [-1.0, -1.0]) - (5.0, [-1.0, -1.0]) - (10.0, [-1.0, -1.0]) - (0.0, [1.0, -1.0]) - (5.0, [1.0, -1.0]) - (10.0, [1.0, -1.0]) - (0.0, [-1.0, 1.0]) - (5.0, [-1.0, 1.0]) - (10.0, [-1.0, 1.0]) - (0.0, [1.0, 1.0]) - (5.0, [1.0, 1.0]) - (10.0, [1.0, 1.0]) - -julia> supports(g, ndarray = true) # format it as an n-dimensional array (t by x[1] by x[2]) -3×2×2 Array{Tuple, 3}: -[:, :, 1] = - (0.0, [-1.0, -1.0]) (0.0, [1.0, -1.0]) - (5.0, [-1.0, -1.0]) (5.0, [1.0, -1.0]) - (10.0, [-1.0, -1.0]) (10.0, [1.0, -1.0]) - -[:, :, 2] = - (0.0, [-1.0, 1.0]) (0.0, [1.0, 1.0]) - (5.0, [-1.0, 1.0]) (5.0, [1.0, 1.0]) - (10.0, [-1.0, 1.0]) (10.0, [1.0, 1.0]) -``` +combinations are determined automatically. 
## TranscriptionOpt `InfiniteOpt.TranscriptionOpt` is a sub-module which principally implements -[]`TranscriptionBackend`](@ref)s and its related access/modification methods. Thus, +[`TranscriptionBackend`](@ref)s and its related access/modification methods. Thus, this section will detail what these are and how they work. ### TranscriptionBackends @@ -438,17 +400,17 @@ yet. Next we can retrieve the `JuMP` variable(s) for a particular `InfiniteOpt` variable via [`transformation_variable`](@ref). For finite variables, this will -be a one to one mapping, and for infinite variables a list of supported variables -will be returned in the order of the supports. Following the initial example in +be a one to one mapping, and for infinite variables an array will be returned that corresponds +to the underlying supports. Following the initial example in the basic usage section, this is done: ```jldoctest transcribe julia> build_transformation_backend!(inf_model); backend = transformation_backend(inf_model); julia> transformation_variable(y, backend) 3-element Vector{VariableRef}: - y(support: 1) - y(support: 2) - y(support: 3) + y(0.0) + y(5.0) + y(10.0) julia> transformation_variable(z, backend) z @@ -468,23 +430,14 @@ julia> supports(y) ``` !!! note - 1. Note that like `supports`, the `transformation_[obj]` methods also employ the - `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by - default will return variables/expressions/constraints associated with public - supports. The full set (e.g., ones corresponding to internal collocation nodes) - is obtained via `label = All`. - 2. These methods also employ the `ndarray::Bool` keyword argument that will cause the - output to be formatted as an n-dimensional array where the dimensions - correspond to the infinite parameter dependencies. 
For example, if we have an - infinite variable `y(t, ξ)`, and we invoke a query method with `ndarray = true` - then we'll get a matrix whose dimensions correspond to the supports of `t` and - `ξ`, respectively. Also, if `ndarray = true` then `label` correspond to the - intersection of supports labels in contrast to its default of invoking the union - of the labels. + Note that like `supports`, the `transformation_[obj]` methods also employ the + `label::Type{AbstractSupportLabel} = PublicLabel` keyword argument that by + default will return variables/expressions/constraints associated with public + supports. The full set (e.g., ones corresponding to internal collocation nodes) + is obtained via `label = All`. Likewise, [`transformation_constraint`](@ref transformation_constraint(::InfOptConstraintRef)) and -[`supports`](@ref supports(::InfOptConstraintRef)) can be used with constraints to find their transcribed -equivalents in the `JuMP` model and determine their supports. +[`supports`](@ref supports(::InfOptConstraintRef)) can be used with constraints to find their transcribed equivalents in the `JuMP` model and determine their supports. 
We can also do this with measures and expressions: ```jldoctest transcribe @@ -494,16 +447,16 @@ support_sum{t}[y(t)²] julia> build_transformation_backend!(inf_model) julia> transformation_variable(meas) -y(support: 1)² + y(support: 2)² + y(support: 3)² +y(0.0)² + y(5.0)² + y(10.0)² julia> supports(meas) () julia> transformation_expression(y^2 + z - 42) 3-element Vector{AbstractJuMPScalar}: - y(support: 1)² + z - 42 - y(support: 2)² + z - 42 - y(support: 3)² + z - 42 + y(0.0)² + z - 42 + y(5.0)² + z - 42 + y(10.0)² + z - 42 julia> supports(y^2 + z - 42) 3-element Vector{Tuple}: diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index 1fcb72d3..1e6ff4f2 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -15,9 +15,11 @@ mutable struct TranscriptionData infvar_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, JuMP.VariableRef}} infvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.VariableRef}} infvar_supports::Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}} - infvar_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}} finvar_mappings::Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef} + # Metadata + valid_indices::Dict{Any, Array{Bool}} + # Internal variables (created via internal measure expansions) semi_infinite_vars::Vector{InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef}} semi_lookup::Dict{Tuple{InfiniteOpt.GeneralVariableRef, Dict{Int, Float64}}, InfiniteOpt.GeneralVariableRef} @@ -28,15 +30,12 @@ mutable struct TranscriptionData measure_lookup::Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}} measure_mappings::Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.AbstractJuMPScalar}} measure_supports::Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}} - measure_support_labels::Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}} # Constraint information constr_mappings::Dict{InfiniteOpt.InfOptConstraintRef, 
Array{JuMP.ConstraintRef}} constr_supports::Dict{InfiniteOpt.InfOptConstraintRef, Array{Tuple}} - constr_support_labels::Dict{InfiniteOpt.InfOptConstraintRef, - Array{Set{DataType}}} # Collected Supports supports::Tuple @@ -50,8 +49,9 @@ mutable struct TranscriptionData Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, JuMP.VariableRef}}(), Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.VariableRef}}(), Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}}(), - Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}}(), Dict{InfiniteOpt.GeneralVariableRef, JuMP.VariableRef}(), + # meta data + Dict{Any, Array{Bool}}(), # internal variables Vector{InfiniteOpt.SemiInfiniteVariable{InfiniteOpt.GeneralVariableRef}}(), Dict{Tuple{InfiniteOpt.GeneralVariableRef, Dict{Int, Float64}}, InfiniteOpt.GeneralVariableRef}(), @@ -61,11 +61,9 @@ mutable struct TranscriptionData Dict{InfiniteOpt.GeneralVariableRef, Dict{Vector{Float64}, Int}}(), Dict{InfiniteOpt.GeneralVariableRef, Array{JuMP.AbstractJuMPScalar}}(), Dict{InfiniteOpt.GeneralVariableRef, Array{Tuple}}(), - Dict{InfiniteOpt.GeneralVariableRef, Array{Set{DataType}}}(), # constraint info Dict{InfiniteOpt.InfOptConstraintRef, Array{JuMP.ConstraintRef}}(), Dict{InfiniteOpt.InfOptConstraintRef, Array{Tuple}}(), - Dict{InfiniteOpt.InfOptConstraintRef, Array{Set{DataType}}}(), # support storage (), (), @@ -79,8 +77,8 @@ function Base.empty!(data::TranscriptionData) empty!(data.infvar_lookup) empty!(data.infvar_mappings) empty!(data.infvar_supports) - empty!(data.infvar_support_labels) empty!(data.finvar_mappings) + empty!(data.valid_indices) empty!(data.semi_infinite_vars) empty!(data.semi_lookup) data.last_point_index = 0 @@ -88,10 +86,8 @@ function Base.empty!(data::TranscriptionData) empty!(data.measure_lookup) empty!(data.measure_mappings) empty!(data.measure_supports) - empty!(data.measure_support_labels) empty!(data.constr_mappings) empty!(data.constr_supports) - empty!(data.constr_support_labels) data.supports 
= () data.support_labels = () data.has_internal_supports = false @@ -200,20 +196,67 @@ end ## truncate a collection according to a label # 0-Array -function _truncate_by_label(arr::Array{T, 0}, labels, label) where {T} - return labels[] <:label ? arr : arr[[]] +function _truncate_by_label( + arr::Array{T, 0}, + labels::Tuple{}, + label, + ::Nothing + ) where {T} + return arr +end + +# Vector (no valid indices to worry about) +function _truncate_by_label( + arr::Vector, + labels::Tuple{Vector{Set{DataType}}}, + label, + ::Nothing + ) + inds = map(s -> any(l -> l <: label, s), labels[1]) + return all(inds) ? arr : arr[inds] +end + +# Vector (has 1-D valid indices to enforce) +function _truncate_by_label( + arr::Vector, + labels::Tuple{Vector{Set{DataType}}}, + label, + valid_idxs::Vector{Bool} + ) + new_labels = (labels[1][valid_idxs], ) + return _truncate_by_label(arr, new_labels, label, nothing) end -# Vector -function _truncate_by_label(arr::Vector, labels, label) - inds = map(s -> any(l -> l <: label, s), labels) - return arr[inds] +# Vector (has N-D valid indices to enforce) +function _truncate_by_label( + arr::Vector, + labels::NTuple{N, Vector{Set{DataType}}}, + label, + valid_idxs::Array{Bool, N} + ) where {N} + label_idx_array = zeros(Bool, size(valid_idxs)...) + label_idxs = (map(s -> any(l -> l <: label, s), sets) for sets in labels) + label_idx_array[label_idxs...] .= true + return arr[label_idx_array[valid_idxs]] end # Array -function _truncate_by_label(arr::Array{T, N}, labels, label) where {T, N} - firsts = (labels[(j == i ? Colon() : 1 for j in 1:N)...] for j in 1:N) - return arr[(findall(s -> any(l -> l <: label, s), sets) for sets in firsts)...] +function _truncate_by_label( + arr::Array{T, N}, + labels::NTuple{N, Vector{Set{DataType}}}, + label, + ::Nothing + ) where {T, N} + # TODO carefully revise the logic behind the interesection of different axes + return arr[(map(s -> any(l -> l <: label, s), sets) for sets in labels)...] 
+end + +# High-level +function _truncate_by_label(arr, ref, label, group_idxs, backend) + data = backend.data + labels = Tuple(data.support_labels[i][1:end-1] for i in group_idxs) + valid_idxs = get(data.valid_indices, ref, nothing) + return _truncate_by_label(arr, labels, label, valid_idxs) end """ @@ -311,8 +354,8 @@ function transcription_variable( if _ignore_label(backend, label) return vars else - labels = transcription_data(backend).infvar_support_labels[vref] - return _truncate_by_label(vars, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(vref) + return _truncate_by_label(vars, vref, label, group_idxs, backend) end end @@ -328,19 +371,17 @@ function transcription_variable( support_indices = support_index_iterator(backend, group_idxs) dims = size(support_indices)[group_idxs] vals = Array{Float64, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) # iterate over the indices and compute the values for idx in support_indices supp = index_to_support(backend, idx) val_idx = idx.I[group_idxs] - @inbounds labels[val_idx...] = index_to_labels(backend, idx) @inbounds vals[val_idx...] 
= transcription_expression(fref, backend, supp) end # return the values if _ignore_label(backend, label) return vals else - return _truncate_by_label(vals, labels, label) + return _truncate_by_label(vals, fref, label, group_idxs, backend) end end @@ -410,10 +451,10 @@ function InfiniteOpt.variable_supports( end supps = transcription_data(backend).infvar_supports[vref] if _ignore_label(backend, label) - return vals + return supps else - labels = transcription_data(backend).infvar_support_labels[vref] - return _truncate_by_label(supps, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(dvref) + return _truncate_by_label(supps, vref, label, group_idxs, backend) end end @@ -428,19 +469,17 @@ function InfiniteOpt.variable_supports( support_indices = support_index_iterator(backend, group_idxs) dims = size(support_indices)[group_idxs] supps = Array{Tuple, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) param_supps = parameter_supports(backend) # iterate over the indices and compute the values for idx in support_indices val_idx = idx.I[group_idxs] - @inbounds labels[val_idx...] = index_to_labels(backend, idx) @inbounds supps[val_idx...] 
= Tuple(param_supps[j][idx[j]] for j in group_idxs) end # return the values if _ignore_label(backend, label) - return vals + return supps else - return _truncate_by_label(supps, labels, label) + return _truncate_by_label(supps, fref, label, group_idxs, backend) end end @@ -550,8 +589,8 @@ function transcription_variable( if length(exprs) > 1 && _ignore_label(backend, label) return exprs elseif length(exprs) > 1 - labels = transcription_data(backend).measure_support_labels[mref] - return _truncate_by_label(exprs, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(mref) + return _truncate_by_label(exprs, mref, label, group_idxs, backend) else return first(exprs) end @@ -585,8 +624,8 @@ function InfiniteOpt.variable_supports( if length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 - labels = transcription_data(backend).measure_support_labels[mref] - return _truncate_by_label(supps, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(mref) + return _truncate_by_label(supps, mref, label, group_idxs, backend) else return first(supps) end @@ -637,17 +676,15 @@ function transcription_expression( support_indices = support_index_iterator(backend, group_idxs) dims = size(support_indices)[group_idxs] exprs = Array{JuMP.AbstractJuMPScalar, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) # iterate over the indices and compute the values for idx in support_indices supp = index_to_support(backend, idx) expr_idx = idx.I[group_idxs] - @inbounds labels[expr_idx...] = index_to_labels(backend, idx) @inbounds exprs[expr_idx...] = transcription_expression(expr, backend, supp) end # return the values if !_ignore_label(backend, label) - exprs = _truncate_by_label(exprs, labels, label) + exprs = _truncate_by_label(exprs, nothing, label, group_idxs, backend) end return length(support_indices) > 1 ? 
exprs : first(exprs) end @@ -713,18 +750,15 @@ function InfiniteOpt.expression_supports( support_indices = support_index_iterator(backend, group_idxs) dims = size(support_indices)[group_idxs] supps = Array{Tuple, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) param_supps = parameter_supports(backend) # iterate over the indices and compute the values for idx in support_indices - supp = index_to_support(backend, idx) expr_idx = idx.I[group_idxs] - @inbounds labels[expr_idx...] = index_to_labels(backend, idx) - @inbounds supps[expr_idx...] = Tuple(param_supps[j][idx[j]] for j in group_int_idxs) + @inbounds supps[expr_idx...] = Tuple(param_supps[j][idx[j]] for j in group_idxs) end # return the values if !_ignore_label(backend, label) - supps = _truncate_by_label(supps, labels, label) + supps = _truncate_by_label(supps, nothing, label, group_idxs, backend) end return length(support_indices) > 1 ? supps : first(supps) end @@ -776,8 +810,8 @@ function transcription_constraint( if length(constr) > 1 && _ignore_label(backend, label) return constr elseif length(constr) > 1 - labels = transcription_data(backend).constr_support_labels[cref] - return _truncate_by_label(constr, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(cref) + return _truncate_by_label(constr, cref, label, group_idxs, backend) else return first(constr) end @@ -834,8 +868,8 @@ function InfiniteOpt.constraint_supports( if length(supps) > 1 && _ignore_label(backend, label) return supps elseif length(supps) > 1 - labels = transcription_data(backend).constr_support_labels[cref] - return _truncate_by_label(supps, labels, label) + group_idxs = InfiniteOpt.parameter_group_int_indices(cref) + return _truncate_by_label(supps, cref, label, group_idxs, backend) else return first(supps) end @@ -896,23 +930,5 @@ function index_to_support( index::CartesianIndex ) raw_supps = parameter_supports(backend) - return [j for i in eachindex(index.I) for j in 
raw_supps[i][index[i]]] -end - -""" - index_to_labels(backend::TranscriptionBackend, index::CartesianIndex)::Set{DataType} - -Given a particular support `index` generated via [`support_index_iterator`](@ref) -using `backend`, return the corresponding support label set from `TranscriptionData.support_labels`. -""" -function index_to_labels( - backend::TranscriptionBackend, - index::CartesianIndex - ) - raw_labels = transcription_data(backend).support_labels - labels = Set{DataType}() - for (i, j) in enumerate(index.I) - union!(labels, raw_labels[i][j]) - end - return labels + return Float64[j for i in eachindex(index.I) for j in raw_supps[i][index[i]]] end diff --git a/src/TranscriptionOpt/transcribe.jl b/src/TranscriptionOpt/transcribe.jl index 22ca56a3..e1175ad7 100644 --- a/src/TranscriptionOpt/transcribe.jl +++ b/src/TranscriptionOpt/transcribe.jl @@ -96,6 +96,18 @@ function _format_infinite_info( info.has_start, start, info.binary, info.integer) end +# Set the cutoff for number of infinite parameters to be included in a variable name +const _MaxNumParamsForPrinting = 4 + +# Make variable name with infinite parameters values directly if possible +function _make_var_name(base_name, param_nums, tuple_supp, var_idx) + if length(param_nums) <= _MaxNumParamsForPrinting + return string(base_name, "(", join(tuple_supp, ", "), ")") + else + return string(base_name, "[", join(var_idx, ", "), "]") + end +end + """ transcribe_infinite_variables!( backend::TranscriptionBackend, @@ -125,7 +137,6 @@ function transcribe_infinite_variables!( supp_indices = support_index_iterator(backend, group_idxs) dims = size(supp_indices)[group_idxs] vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) supp_type = typeof(Tuple(ones(length(prefs)), prefs)) supps = Array{supp_type, length(dims)}(undef, dims...) 
lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() @@ -134,20 +145,19 @@ function transcribe_infinite_variables!( supp = index_to_support(backend, i)[param_nums] info = _format_infinite_info(var, supp) var_idx = i.I[group_idxs] - v_name = string(base_name, "[", join(var_idx, ","), "]") + tuple_supp = Tuple(supp, prefs) + v_name = _make_var_name(base_name, param_nums, tuple_supp, var_idx) v = JuMP.ScalarVariable(info) jump_vref = JuMP.add_variable(backend.model, v, v_name) @inbounds vrefs[var_idx...] = jump_vref lookup_dict[supp] = jump_vref - @inbounds supps[var_idx...] = Tuple(supp, prefs) - @inbounds labels[var_idx...] = index_to_labels(backend, i) + @inbounds supps[var_idx...] = tuple_supp end # save the transcription information ivref = InfiniteOpt.GeneralVariableRef(model, idx) data = transcription_data(backend) data.infvar_lookup[ivref] = lookup_dict data.infvar_mappings[ivref] = vrefs - data.infvar_support_labels[ivref] = labels data.infvar_supports[ivref] = supps end return @@ -178,7 +188,6 @@ function _transcribe_derivative_variable(dref, d, backend) supp_indices = support_index_iterator(backend, group_idxs) dims = size(supp_indices)[group_idxs] vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) supp_type = typeof(Tuple(ones(length(prefs)), prefs)) supps = Array{supp_type, length(dims)}(undef, dims...) lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() @@ -187,19 +196,18 @@ function _transcribe_derivative_variable(dref, d, backend) supp = index_to_support(backend, i)[param_nums] info = _format_derivative_info(d, supp) var_idx = i.I[group_idxs] - d_name = string(base_name, "[", join(var_idx, ","), "]") + tuple_supp = Tuple(supp, prefs) + d_name = _make_var_name(base_name, param_nums, tuple_supp, var_idx) d_var = JuMP.ScalarVariable(info) jump_vref = JuMP.add_variable(backend.model, d_var, d_name) @inbounds vrefs[var_idx...] 
= jump_vref lookup_dict[supp] = jump_vref - @inbounds supps[var_idx...] = Tuple(supp, prefs) - @inbounds labels[var_idx...] = index_to_labels(backend, i) + @inbounds supps[var_idx...] = tuple_supp end # save the transcription information data = transcription_data(backend) data.infvar_lookup[dref] = lookup_dict data.infvar_mappings[dref] = vrefs - data.infvar_support_labels[dref] = labels data.infvar_supports[dref] = supps return end @@ -259,12 +267,11 @@ function _set_semi_infinite_variable_mapping( ivref_param_nums = InfiniteOpt._parameter_numbers(ivref) eval_supps = var.eval_supports group_idxs = var.group_int_idxs - prefs = InfiniteOpt.raw_parameter_refs(rvref) + prefs = InfiniteOpt.raw_parameter_refs(var) # prepare for iterating over its supports supp_indices = support_index_iterator(backend, group_idxs) dims = size(supp_indices)[group_idxs] vrefs = Array{JuMP.VariableRef, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) supp_type = typeof(Tuple(ones(length(prefs)), prefs)) supps = Array{supp_type, length(dims)}(undef, dims...) lookup_dict = Dict{Vector{Float64}, JuMP.VariableRef}() @@ -272,31 +279,29 @@ function _set_semi_infinite_variable_mapping( # map a variable for each support for i in supp_indices raw_supp = index_to_support(backend, i) + var_idx = i.I[group_idxs] # ensure this support is valid with the reduced restriction if any(!isnan(raw_supp[ivref_param_nums[k]]) && raw_supp[ivref_param_nums[k]] != v for (k, v) in eval_supps) - valid_idxs[lin_idx] = false + valid_idxs[var_idx...] = false continue end # map to the current transcription variable supp = raw_supp[param_nums] ivref_supp = [haskey(eval_supps, j) ? eval_supps[j] : raw_supp[k] for (j, k) in enumerate(ivref_param_nums)] - var_idx = i.I[group_idxs] jump_vref = lookup_by_support(ivref, backend, ivref_supp) @inbounds vrefs[var_idx...] = jump_vref lookup_dict[supp] = jump_vref @inbounds supps[var_idx...] 
= Tuple(supp, prefs) - @inbounds labels[var_idx...] = index_to_labels(backend, i) end # truncate vrefs if any supports were skipped because of dependent parameter supps and save data = transcription_data(backend) if !all(valid_idxs) data.infvar_mappings[rvref] = vrefs[valid_idxs] - data.infvar_support_labels[rvref] = labels[valid_idxs] data.infvar_supports[rvref] = supps[valid_idxs] + data.valid_indices[rvref] = valid_idxs else data.infvar_mappings[rvref] = vrefs - data.infvar_support_labels[rvref] = labels data.infvar_supports[rvref] = supps end data.infvar_lookup[rvref] = lookup_dict @@ -563,7 +568,6 @@ function transcribe_measures!( supp_indices = support_index_iterator(backend, group_idxs) dims = size(supp_indices)[group_idxs] exprs = Array{JuMP.AbstractJuMPScalar, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) supp_type = typeof(Tuple(ones(length(prefs)), prefs)) supps = Array{supp_type, length(dims)}(undef, dims...) lookup_dict = Dict{Vector{Float64}, Int}() @@ -575,13 +579,11 @@ function transcribe_measures!( supp = raw_supp[meas.parameter_nums] lookup_dict[supp] = lin_idx @inbounds supps[expr_idx...] = Tuple(supp, prefs) - @inbounds labels[expr_idx...] = index_to_labels(backend, i) end # save the transcription information data = transcription_data(backend) data.measure_lookup[mref] = lookup_dict data.measure_mappings[mref] = exprs - data.measure_support_labels[mref] = labels data.measure_supports[mref] = supps end return @@ -765,7 +767,6 @@ function transcribe_constraints!( dims = size(supp_indices)[group_idxs] crefs = Array{JuMP.ConstraintRef, length(dims)}(undef, dims...) supps = Array{Tuple, length(dims)}(undef, dims...) - labels = Array{Set{DataType}, length(dims)}(undef, dims...) valid_idxs = ones(Bool, dims...) # iterate over the support indices for the info constraints if object.is_info_constraint @@ -779,7 +780,6 @@ function transcribe_constraints!( @inbounds crefs[con_idx...] 
= info_ref @inbounds supps[con_idx...] = Tuple(param_supps[j][i[j]] for j in group_idxs) - @inbounds labels[con_idx...] = index_to_labels(backend, i) else valid_idxs[con_idx...] = false end @@ -798,13 +798,18 @@ function transcribe_constraints!( con_idx = i.I[group_idxs] if _support_in_restrictions(raw_supp, restrict_indices, restrict_domains) - new_name = isempty(name) ? "" : string(name, "[", join(con_idx, ","), "]") + new_name = if isempty(name) + "" + elseif isempty(group_idxs) + name + else + string(name, "[", join(con_idx, ", "), "]") + end new_cref = _process_constraint(backend, constr, func, set, raw_supp, new_name) @inbounds crefs[con_idx...] = new_cref @inbounds supps[con_idx...] = Tuple(param_supps[j][i[j]] for j in group_idxs) - @inbounds labels[con_idx...] = index_to_labels(backend, i) else valid_idxs[con_idx...] = false end @@ -816,11 +821,10 @@ function transcribe_constraints!( if !all(valid_idxs) data.constr_mappings[cref] = crefs[valid_idxs] data.constr_supports[cref] = supps[valid_idxs] - data.constr_support_labels[cref] = labels[valid_idxs] + data.valid_indices[cref] = valid_idxs else data.constr_mappings[cref] = crefs data.constr_supports[cref] = supps - data.constr_support_labels[cref] = labels end end return diff --git a/src/backends.jl b/src/backends.jl index 7575a86f..e660f0e8 100644 --- a/src/backends.jl +++ b/src/backends.jl @@ -728,19 +728,18 @@ Certain backends may also allow the use of keyward arguments. The default backend `TranscriptionOpt` uses the keyword arguments: - `label::Type{<:AbstractSupportLabel} = PublicLabel` -- `ndarray::Bool = false` By default only variables corresponding to public supports are returned, the -full set can be accessed via `label = All`. Moreover, all the transcripted variables -of infinite variables are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. 
+full set can be accessed via `label = All`. Where possible, all the transcripted +variables of infinite variables are returned as an n-dimensional array +where each dimension is determined by the each independent group of +infinite parameters it depends on. **Example** ```julia-repl julia> transformation_variable(x) # infinite variable 2-element Array{VariableRef,1}: - x(support: 1) - x(support: 2) + x(0.0) + x(1.0) julia> transformation_variable(z) # finite variable z @@ -782,7 +781,6 @@ end supports( vref::DecisionVariableRef; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...] )::Vector{<:Tuple} @@ -790,16 +788,16 @@ Return the supports associated with `vref` in the transformation model. Errors if [`InfiniteOpt.variable_supports`](@ref) has not been extended for the transformation backend type or if `vref` is not reformulated in the transformation backend. -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +The keyword argument `label` is what `TranscriptionOpt` employs and `kwargs` denote extra ones that user extensions may employ in accordance with their implementation of `variable_supports`. Errors if such an extension has not been written. By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -variables are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. +full set can be accessed via `label = All`. Where possible, all the supports +of infinite variables are returned as an n-dimensional array +where each dimension is determined by the each independent group of +infinite parameters it depends on. **Example** ```julia-repl @@ -851,7 +849,6 @@ end transformation_expression( expr::JuMP.AbstractJuMPScalar; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...] 
) @@ -860,22 +857,22 @@ to `expr`. Also errors if no such expression can be found in the transformation backend (meaning one or more of the underlying variables have not been transformed). -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +The keyword argument `label` is what `TranscriptionOpt` employs and `kwargs` denote extra ones that user extensions may employ in accordance with their implementation of [`transformation_expression`](@ref). Errors if such an extension has not been written. By default only the expressions associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite expressions are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. The corresponding supports are obtained via +full set can be accessed via `label = All`. Where possible, all the transformed +expressions are returned as an n-dimensional array +where each dimension is determined by the each independent group of +infinite parameters it depends on. The corresponding supports are obtained via `supports` using the same keyword arguments. **Example** ```julia-repl julia> transformation_expression(my_expr) # finite expression -x(support: 1) - y +x(0.0) - y ``` """ function transformation_expression(expr::JuMP.AbstractJuMPScalar; kwargs...) @@ -917,23 +914,22 @@ end supports( expr::JuMP.AbstractJuMPScalar; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...] ) Return the support associated with `expr`. Errors if `expr` is not associated with the constraint mappings stored in the transformation backend. 
-The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +The keyword argument `label` is what `TranscriptionOpt` employs and `kwargs` denote extra ones that user extensions may employ in accordance with their implementation of `expression_supports`. Errors if such an extension has not been written. By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -expressions are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. +full set can be accessed via `label = All`. Where possible, all the supports +of an infinite expression are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters it depends on. **Example** ```julia-repl @@ -980,7 +976,6 @@ end transformation_constraint( cref::InfOptConstraintRef; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...] ) @@ -988,22 +983,22 @@ Return the reformulation constraint(s) stored in the transformation backend that correspond to `cref`. Errors if no such constraint can be found in the transformation backend. -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +The keyword argument `label` is what `TranscriptionOpt` employs and `kwargs` denote extra ones that user extensions may employ in accordance with their implementation of [`transformation_constraint`](@ref). Errors if such an extension has not been written. By default only the constraints associated with public supports are returned, the -full set can be accessed via `label = All`. Moreover, infinite constraints are -returned as a list corresponding to their supports. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. 
The corresponding supports are obtained via +full set can be accessed via `label = All`. Where possible, all the transformed +constraints are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters it depends on. The corresponding supports are obtained via `supports` using the same keyword arguments. **Example** ```julia-repl julia> transformation_constraint(c1) # finite constraint -c1 : x(support: 1) - y <= 3.0 +c1 : x(0.0) - y <= 3.0 ``` """ function transformation_constraint( @@ -1037,22 +1032,21 @@ end """ supports(cref::InfOptConstraintRef; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...]) Return the support associated with `cref`. Errors if `cref` is not associated with the constraint mappings stored in the transformation backend. -The keyword arugments `label` and `ndarray` are what `TranscriptionOpt` employ +The keyword argument `label` is what `TranscriptionOpt` employs and `kwargs` denote extra ones that user extensions may employ in accordance with their implementation of `constraint_supports`. Errors if such an extension has not been written. By default only the public supports are returned, the -full set can be accessed via `label = All`. Moreover, the supports of infinite -constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. +full set can be accessed via `label = All`. Where possible, all the supports +of the constraint are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters it depends on. **Example** ```julia-repl diff --git a/src/results.jl b/src/results.jl index 5e24123f..439697c8 100644 --- a/src/results.jl +++ b/src/results.jl @@ -286,12 +286,11 @@ being used. 
The default backend `TranscriptionOpt` uses the keyword arguments: - `result::Int = 1`: indexes the solution result to be queried - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned -- `ndarray::Bool = false`: indicates whether the output should be formatted as an array By default only the values associated with public supports (i.e., `PublicLabel`s) -are returned, the full set can be accessed via `label = All`. Moreover, the values -of infinite variables are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the variable has multiple -infinite parameter dependencies. +are returned, the full set can be accessed via `label = All`. Where possible, all the +values are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters they depend on. To provide context for the values, it may be helpful to also query the variable's `parameter_refs` and `supports` which will have a one-to-one correspondence with @@ -327,12 +326,11 @@ being used. The default backend `TranscriptionOpt` uses the keyword arguments: - `result::Int = 1`: indexes the solution result to be queried - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned -- `ndarray::Bool = false`: indicates whether the output should be formatted as an array By default only the values associated with public supports (i.e., `PublicLabel`s) -are returned, the full set can be accessed via `label = All`. Moreover, the values -of infinite expressions are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the expression has multiple -infinite parameter dependencies. +are returned, the full set can be accessed via `label = All`. 
Where possible, all the +values of infinite expressions are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters they depend on. To provide context for the values, it may be helpful to also query the expression's `parameter_refs` and `supports` which will have a one-to-one correspondence with @@ -391,12 +389,11 @@ being used. The default backend `TranscriptionOpt` uses the keyword arguments: - `result::Int = 1`: indexes the solution result to be queried - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned -- `ndarray::Bool = false`: indicates whether the output should be formatted as an array By default only the values associated with public supports (i.e., `PublicLabel`s) -are returned, the full set can be accessed via `label = All`. Moreover, the values -of infinite constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. +are returned, the full set can be accessed via `label = All`. Where possible, all the +values of infinite constraints are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters they depend on. To provide context for the values, it may be helpful to also query the constraint's `parameter_refs` and `supports` which will have a one-to-one correspondence with @@ -481,12 +478,11 @@ for (Ref, func, mapper) in ( being used. The default backend `TranscriptionOpt` uses the keyword arguments: - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned - - `ndarray::Bool = false`: indicates whether the output should be formatted as an array By default only the values associated with public supports (i.e., `PublicLabel`s) - are returned, the full set can be accessed via `label = All`. 
Moreover, the values - of infinite variables/constraints are returned as a list. However, a n-dimensional array - can be obtained via `ndarray = true` which is handy when the constraint has multiple - infinite parameter dependencies. + are returned, the full set can be accessed via `label = All`. Where possible, all the + values of infinite objects are returned as an n-dimensional array + where each dimension is determined by each independent group of + infinite parameters they depend on. To provide context for the values, it may be helpful to also query the `parameter_refs` and `supports` which will have a one-to-one correspondence with @@ -556,12 +552,11 @@ being used. The default backend `TranscriptionOpt` uses the keyword arguments: - `result::Int = 1`: indexes the solution result to be queried - `label::Type{<:AbstractSupportLabel} = PublicLabel`: the label of supports to be returned -- `ndarray::Bool = false`: indicates whether the output should be formatted as an array By default only the values associated with public supports (i.e., `PublicLabel`s) -are returned, the full set can be accessed via `label = All`. Moreover, the duals -of infinite constraints are returned as a list. However, a n-dimensional array -can be obtained via `ndarray = true` which is handy when the constraint has multiple -infinite parameter dependencies. +are returned, the full set can be accessed via `label = All`. Where possible, all the +duals of infinite constraints are returned as an n-dimensional array +where each dimension is determined by each independent group of +infinite parameters they depend on. To provide context for the duals, it may be helpful to also query the constraint's `parameter_refs` and `supports` which will have a one-to-one correspondence with @@ -608,7 +603,7 @@ constraints. 
The indexing syntax for these is: ```julia report[ref::[GeneralVariableRef/InfOptConstraintRef]; [label::Type{<:AbstractSupportLabel} = PublicLabel, - ndarray::Bool = false, kwargs...]] + kwargs...]] ``` This is enabled for new transformation backends by appropriately diff --git a/src/semi_infinite_variables.jl b/src/semi_infinite_variables.jl index 51a8aa5d..ad9c132a 100644 --- a/src/semi_infinite_variables.jl +++ b/src/semi_infinite_variables.jl @@ -386,19 +386,23 @@ function eval_supports(vref::SemiInfiniteVariableRef)::Dict{Int, Float64} return core_object(vref).eval_supports end -""" - raw_parameter_refs(vref::SemiInfiniteVariableRef)::VectorTuple +# helper version of raw_parameter_refs +function raw_parameter_refs(var::SemiInfiniteVariable) + orig_prefs = raw_parameter_refs(var.infinite_variable_ref) + eval_supps = var.eval_supports + delete_indices = [!haskey(eval_supps, i) for i in eachindex(orig_prefs)] + return Collections.restricted_copy(orig_prefs, delete_indices) +end +""" + raw_parameter_refs(vref::Union{SemiInfiniteVariableRef, SemiInfiniteVariable})::VectorTuple Return the raw [`VectorTuple`](@ref InfiniteOpt.Collections.VectorTuple) of the parameter references that `vref` depends on. This is primarily an internal method where [`parameter_refs`](@ref parameter_refs(vref::SemiInfiniteVariableRef)) is intended as the preferred user function. 
""" function raw_parameter_refs(vref::SemiInfiniteVariableRef) - orig_prefs = raw_parameter_refs(infinite_variable_ref(vref)) - eval_supps = eval_supports(vref) - delete_indices = [!haskey(eval_supps, i) for i in eachindex(orig_prefs)] - return Collections.restricted_copy(orig_prefs, delete_indices) + return raw_parameter_refs(core_object(vref)) end """ diff --git a/test/TranscriptionOpt/measure.jl b/test/TranscriptionOpt/measure.jl index cf2b205a..e2b79b7a 100644 --- a/test/TranscriptionOpt/measure.jl +++ b/test/TranscriptionOpt/measure.jl @@ -14,16 +14,15 @@ @variable(tb.model, c) @variable(tb.model, d) data = IOTO.transcription_data(tb) - data.infvar_mappings[x] = [a, b, c] - data.infvar_supports[x] = [(0.,), (0.5,), (1.,)] - data.infvar_lookup[x] = Dict{Vector{Float64}, Int}([0] => 1, [0.5] => 2, [1] => 3) - data.infvar_mappings[y] = [a, b, c, d] - data.infvar_supports[y] = [(0., [0., 0.]), (0., [1., 1.]), (1., [0., 0.]), (1., [1., 1.])] - data.infvar_lookup[y] = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0, 1, 1] => 2, - [1, 0, 0] => 3, [1, 1, 1] => 4) + data.infvar_mappings[x] = [a, b] + data.infvar_supports[x] = [(0.,), (1.,)] + data.infvar_lookup[x] = Dict([0] => a, [1] => b) + data.infvar_mappings[y] = [a b; c d] + data.infvar_supports[y] = [(0., [0., 0.]) (0., [1., 1.]); (1., [0., 0.]) (1., [1., 1.])] + data.infvar_lookup[y] = Dict([0, 0, 0] => a, [0, 1, 1] => b, [1, 0, 0] => c, [1, 1, 1] => d) data.infvar_mappings[y0] = [a, b] data.infvar_supports[y0] = [(0., [0., 0.]), (0., [1., 1.])] - data.infvar_lookup[y0] = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0, 1, 1] => 2) + data.infvar_lookup[y0] = Dict([0, 0, 0] => a, [0, 1, 1] => b) data.finvar_mappings[x0] = a IOTO.set_parameter_supports(tb, m) # test add_point_variable @@ -34,19 +33,19 @@ # add one that hasn't been added vref = GeneralVariableRef(m, -1, PointVariableIndex) @test isequal(InfiniteOpt.add_point_variable(tb, x, Float64[1]), vref) - @test IOTO.transcription_variable(vref) == c + @test 
IOTO.transcription_variable(vref) == b # add one that has been added internally @test isequal(InfiniteOpt.add_point_variable(tb, x, Float64[1]), vref) - @test IOTO.transcription_variable(vref) == c + @test IOTO.transcription_variable(vref) == b end # test add_semi_infinite_variable @testset "add_semi_infinite_variable" begin # add one that was already added to the infinite model - var = SemiInfiniteVariable(y, Dict{Int, Float64}(1 => 0), [2, 3], [2]) + var = SemiInfiniteVariable(y, Dict(1 => 0.), [2, 3], [2]) @test isequal(InfiniteOpt.add_semi_infinite_variable(tb, var), y0) @test IOTO.transcription_variable(y0) == [a, b] # add a new one - var = SemiInfiniteVariable(y, Dict{Int, Float64}(1 => 1), [2, 3], [2]) + var = SemiInfiniteVariable(y, Dict(1 => 1.), [2, 3], [2]) vref = GeneralVariableRef(m, -1, SemiInfiniteVariableIndex) @test isequal(InfiniteOpt.add_semi_infinite_variable(tb, var), vref) @test isequal(data.semi_infinite_vars, [var]) diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index fe669a25..1fbf598c 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -49,51 +49,41 @@ end # Test query helper functions @testset "Query Formatters" begin - # initialize models - m = InfiniteModel() - @infinite_parameter(m, par in [0, 1], supports = [0, 1]) - @infinite_parameter(m, pars[1:2] in [0, 1]) - @variable(m, x, Infinite(par, pars)) - @variable(m, q, Infinite(pars, par)) - @variable(m, w, Infinite(par)) - @variable(m, x0, Point(x, 0, [0, 0])) - @variable(m, y) - add_supports(par, 0.5, label = InternalLabel) - @variable(m, xrv, SemiInfinite(x, par, [1, pars[2]])) - tb = m.backend - data = IOTO.transcription_data(tb) - data.has_internal_supports = true - data.supports = ([0., 0.5, 1., NaN], [[0., 0.], [1., 1.], [NaN, NaN]]) - s1 = Set([UserDefined]) - s2 = Set([InternalLabel]) - sd = Set{DataType}() - data.support_labels = ([s1, s2, s1, sd], [s1, s1, sd]) - # test _get_array_type - @testset "_get_array_type" 
begin - @test IOTO._get_array_type(ones(Bool, 2)) == Bool - end - # test _getparameter_group_int_indices - @testset "_getparameter_group_int_indices" begin - @test IOTO._getparameter_group_int_indices(x) == [1, 2] - @test IOTO._getparameter_group_int_indices(y) == [] - @test IOTO._getparameter_group_int_indices(y + x) == [1, 2] - end - # test make_ndarray - @testset "make_ndarray" begin - # test finite variable - @test IOTO.make_ndarray(tb, y, [1], PublicLabel) == [1] - # test ordered infinite variable - @test IOTO.make_ndarray(tb, x, collect(1:6), All) == [1 4; 2 5; 3 6] - @test IOTO.make_ndarray(tb, x, collect(1:6), PublicLabel) == [1 4; 3 6] - # test unordered infinite variable - @test IOTO.make_ndarray(tb, q, collect(1:6), All) == [1 2 3; 4 5 6] - @test IOTO.make_ndarray(tb, q, collect(1:6), PublicLabel) == [1 3; 4 6] - # test infinite variable with single parameter - @test IOTO.make_ndarray(tb, w, collect(1:3), All) == [1, 2, 3] - # test expression - @test IOTO.make_ndarray(tb, w + x, collect(1:6), All) == [1 4; 2 5; 3 6] - # test error - @test_throws ErrorException IOTO.make_ndarray(tb, q, collect(1:3), All) + # test _truncate_by_label + @testset "_truncate_by_label" begin + # 0-dimensional array + a0 = Array{Int, 0}(undef) + a0[] = 1 + l0 = () + @test IOTO._truncate_by_label(a0, l0, PublicLabel, nothing) == a0 + @test IOTO._truncate_by_label(a0, l0, InternalLabel, nothing) == a0 + # vector + a1 = [1, 2, 3] + l1 = ([Set([UserDefined]), Set([UserDefined]), Set([PublicLabel])], ) + @test IOTO._truncate_by_label(a1, l1, PublicLabel, nothing) === a1 + @test IOTO._truncate_by_label(a1, l1, UserDefined, nothing) == [1, 2] + # matrix + a2 = [1 2; 3 4] + l2 = ([Set([UserDefined]), Set([UserDefined])], + [Set([UserDefined]), Set([PublicLabel])]) + @test IOTO._truncate_by_label(a2, l2, PublicLabel, nothing) == a2 + @test IOTO._truncate_by_label(a2, l2, UserDefined, nothing) == [1; 3;;] + # vector w/ valid indices + a1 = [1, 3] + l1 = ([Set([UserDefined]), 
Set([UserDefined]), Set([PublicLabel]), Set{DataType}()], ) + backend = TranscriptionBackend() + backend.data.valid_indices[42] = [true, false, true] + backend.data.support_labels = l1 + @test IOTO._truncate_by_label(a1, 42, PublicLabel, [1], backend) == a1 + @test IOTO._truncate_by_label(a1, 42, UserDefined, [1], backend) == [1] + # vector w/ array valid indices + a2 = [1, 2, 4] + l2 = ([Set([UserDefined]), Set([UserDefined]), Set{DataType}()], + [Set([UserDefined]), Set([PublicLabel]), Set{DataType}()]) + backend.data.valid_indices[10] = [true true; false true] + backend.data.support_labels = l2 + @test IOTO._truncate_by_label(a2, 10, PublicLabel, [1, 2], backend) == a2 + @test IOTO._truncate_by_label(a2, 10, UserDefined, [1, 2], backend) == [1] end end @@ -117,7 +107,6 @@ end data.supports = ([0., 0.5, 1., NaN], [[0., 0.], [1., 1.], [NaN, NaN]]) s1 = Set([UserDefined]) s2 = Set([InternalLabel]) - s3 = union(s1, s2) sd = Set{DataType}() data.support_labels = ([s1, s2, s1, sd], [s1, s1, sd]) @variable(tb.model, a) @@ -140,8 +129,8 @@ end # test normal data.finvar_mappings[y] = a @test IOTO.transcription_variable(y, tb) == a - data.finvar_mappings[x0] = b - @test IOTO.transcription_variable(x0, tb, ndarray = true, label = All) == b + data.finvar_mappings[x0] = a + @test IOTO.transcription_variable(x0, tb, label = All) == a end # test IOTO.transcription_variable (Infinite, semi-infinite, and derivative) @testset "IOTO.transcription_variable (Infinite)" begin @@ -149,29 +138,22 @@ end @test_throws ErrorException IOTO.transcription_variable(x, tb) @test_throws ErrorException IOTO.transcription_variable(xrv, tb) # test normal - data.infvar_mappings[x] = [a, b, c, d, e, f] - data.infvar_support_labels[x] = [s1, s3, s1, s1, s3, s1] - @test IOTO.transcription_variable(x, tb) == [a, b, c, d, e, f] - @test IOTO.transcription_variable(x, tb, label = All) == [a, b, c, d, e, f] - @test IOTO.transcription_variable(x, tb, label = InternalLabel) == [b, e] - 
data.infvar_mappings[xrv] = [d, e, f] - data.infvar_support_labels[xrv] = [s1, s3, s1] - @test IOTO.transcription_variable(xrv, tb) == [d, e, f] - @test IOTO.transcription_variable(xrv, tb, label = All) == [d, e, f] - @test IOTO.transcription_variable(xrv, tb, label = InternalLabel) == [e] - # test ndarray - @test IOTO.transcription_variable(x, tb, label = All, ndarray = true) == [a d; b e; c f] - @test IOTO.transcription_variable(x, tb, ndarray = true) == [a d; c f] - @test_throws ErrorException IOTO.transcription_variable(xrv, tb, ndarray = true) + data.infvar_mappings[x] = [a b; c d; e f] + @test IOTO.transcription_variable(x, tb) == [a b; e f] + @test IOTO.transcription_variable(x, tb, label = All) == [a b; c d; e f] + @test isempty(IOTO.transcription_variable(x, tb, label = InternalLabel)) + data.infvar_mappings[xrv] = [b, d, f] + data.valid_indices[xrv] = [false true; false true; false true] + @test IOTO.transcription_variable(xrv, tb) == [b, f] + @test IOTO.transcription_variable(xrv, tb, label = All) == [b, d, f] + @test IOTO.transcription_variable(xrv, tb, label = InternalLabel) == [] end # test IOTO.transcription_variable (Parameter Function) @testset "IOTO.transcription_variable (Parameter Function)" begin # test normal @test IOTO.transcription_variable(f1, tb) == [sin(0), sin(1)] @test IOTO.transcription_variable(f1, tb, label = All) == sin.([0, 0.5, 1]) - # test ndarray - @test IOTO.transcription_variable(f1, tb, label = All, ndarray = true) == sin.([0, 0.5, 1]) - @test IOTO.transcription_variable(f2, tb, ndarray = true) == ones(2, 2) + @test IOTO.transcription_variable(f2, tb) == ones(2, 2) end # test IOTO.transcription_variable (Fallback) @testset "IOTO.transcription_variable (Fallback)" begin @@ -180,16 +162,16 @@ end # test IOTO.transcription_variable (Single argument) @testset "IOTO.transcription_variable (Single)" begin @test IOTO.transcription_variable(y) == a - @test IOTO.transcription_variable(x, label = All) == [a, b, c, d, e, f] - @test 
IOTO.transcription_variable(x0) == b - @test IOTO.transcription_variable(f2, ndarray = true) == ones(2, 2) + @test IOTO.transcription_variable(x, label = All) == [a b; c d; e f] + @test IOTO.transcription_variable(x0) == a + @test IOTO.transcription_variable(f2) == ones(2, 2) end # test transformation_variable extension @testset "transformation_variable" begin @test transformation_variable(y, tb, label = All) == a - @test transformation_variable(x, tb, label = All) == [a, b, c, d, e, f] - @test transformation_variable(x, tb) == [a, b, c, d, e, f] - @test transformation_variable(x0, tb) == b + @test transformation_variable(x, tb, label = All) == [a b; c d; e f] + @test transformation_variable(x, tb) == [a b; e f] + @test transformation_variable(x0, tb) == a end # test variable_supports for infinite variable with 2 inputs @testset "variable_supports (Backend, Infinite)" begin @@ -197,49 +179,54 @@ end dvref = dispatch_variable_ref(x) delete!(data.infvar_mappings, x) @test_throws ErrorException InfiniteOpt.variable_supports(dvref, tb) - data.infvar_mappings[x] = [a, b, c, d, e, f] - # test supports are empty - lookups = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0.5, 0, 0] => 2, [1, 0, 0] => 3, - [0, 1, 1] => 4, [0.5, 1, 1] => 5, [1, 1, 1] => 6) - data.infvar_lookup[x] = lookups - expected = [(0., [0., 0.]), (0.5, [0., 0.]), (1., [0., 0.]), - (0., [1., 1.]), (0.5, [1., 1.]), (1., [1., 1.])] - @test InfiniteOpt.variable_supports(dvref, tb) == expected + data.infvar_mappings[x] = [a b; c d; e f] + x_supps = [(0.0, [0.0, 0.0]) (0.0, [1., 1.]); + (0.5, [0.0, 0.0]) (0.5, [1., 1.]); + (1.0, [0.0, 0.0]) (1.0, [1., 1.])] + data.infvar_supports[x] = x_supps # test normal - @test InfiniteOpt.variable_supports(dvref, tb, label = All) == expected - @test InfiniteOpt.variable_supports(dvref, tb, label = InternalLabel) == expected[[2, 5]] - # test ndarray - expected = permutedims([(0., [0., 0.]) (0.5, [0., 0.]) (1., [0., 0.]); - (0., [1., 1.]) (0.5, [1., 1.]) (1., [1., 1.])], (2, 
1)) - @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true) == expected[[1, 3], :] - @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true, label = All) == expected + @test InfiniteOpt.variable_supports(dvref, tb) == x_supps[[1, 3], :] + @test InfiniteOpt.variable_supports(dvref, tb, label = All) == x_supps + @test isempty(InfiniteOpt.variable_supports(dvref, tb, label = InternalLabel)) # test with semi-infinite variable - lookups = Dict{Vector{Float64}, Int}([0, 1] => 1, [0.5, 1] => 2, [1, 1] => 3) - data.infvar_lookup[xrv] = lookups + data.infvar_supports[xrv] = [(0., 1.), (0.5, 1.), (1., 1.)] dvref = dispatch_variable_ref(xrv) - expected = [(0., 1.), (0.5, 1.), (1., 1.)] - @test InfiniteOpt.variable_supports(dvref, tb) == expected - @test InfiniteOpt.variable_supports(dvref, tb, label = InternalLabel) == [expected[2]] + @test InfiniteOpt.variable_supports(dvref, tb) == data.infvar_supports[xrv][[1, 3]] + @test InfiniteOpt.variable_supports(dvref, tb, label = All) == data.infvar_supports[xrv] end # test variable_supports for infinite parameter functions with 2 inputs @testset "variable_supports (Backend, Parameter Function)" begin - # test normal df1 = dispatch_variable_ref(f1) @test InfiniteOpt.variable_supports(df1, tb) == [(0.,), (1.,)] @test InfiniteOpt.variable_supports(df1, tb, label = All) == [(0.,), (0.5,), (1.,)] - # test ndarray df2 = dispatch_variable_ref(f2) - @test InfiniteOpt.variable_supports(df1, tb, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)] - @test InfiniteOpt.variable_supports(df2, tb, ndarray = true) isa Array + @test InfiniteOpt.variable_supports(df2, tb) == data.infvar_supports[x][[1, 3], :] + @test InfiniteOpt.variable_supports(df2, tb, label = All) == data.infvar_supports[x] end # test supports for infinite variable @testset "supports (Infinite)" begin - @test supports(x, label = InternalLabel) == [(0.5, [0., 0.]), (0.5, [1., 1.])] - @test supports(xrv) == [(0., 1.), (0.5, 1.), (1., 1.)] + @test supports(x) == 
data.infvar_supports[x][[1, 3], :] + @test supports(xrv) == [(0., 1.), (1., 1.)] @test supports(f1, label = All) == [(0.,), (0.5,), (1.,)] end # test lookup_by_support (infinite vars) @testset "lookup_by_support (Infinite)" begin + # setup + lookups = Dict{Vector{Float64}, VariableRef}( + [0, 0, 0] => a, + [0.5, 0, 0] => c, + [1, 0, 0] => e, + [0, 1, 1] => b, + [0.5, 1, 1] => d, + [1, 1, 1] => f + ) + data.infvar_lookup[x] = lookups + lookups = Dict{Vector{Float64}, VariableRef}( + [0, 1] => b, + [0.5, 1] => d, + [1, 1] => f + ) + data.infvar_lookup[xrv] = lookups # test errors @variable(m, x2, Infinite(par)) @test_throws ErrorException IOTO.lookup_by_support(x2, tb, [0.]) @@ -247,9 +234,9 @@ end @test_throws ErrorException IOTO.lookup_by_support(xrv, tb, [0., 0., 0.]) # test normal @test IOTO.lookup_by_support(x, tb, [0., 0., 0.]) == a - @test IOTO.lookup_by_support(x, tb, [0., 1., 1.]) == d + @test IOTO.lookup_by_support(x, tb, [0., 1., 1.]) == b @test IOTO.lookup_by_support(x, tb, [1., 1., 1.]) == f - @test IOTO.lookup_by_support(xrv, tb, [0., 1.]) == d + @test IOTO.lookup_by_support(xrv, tb, [0., 1.]) == b @test IOTO.lookup_by_support(xrv, tb, [1., 1.]) == f end # test lookup_by_support (infinite parameter functions) @@ -263,7 +250,7 @@ end @variable(m, z2) @test_throws ErrorException IOTO.lookup_by_support(z2, tb, [0.]) # test normal - @test IOTO.lookup_by_support(x0, tb, [0., 0., 0.]) == b + @test IOTO.lookup_by_support(x0, tb, [0., 0., 0.]) == a @test IOTO.lookup_by_support(y, tb, [0., 0., 1.]) == a end # test internal_semi_infinite_variable @@ -308,17 +295,13 @@ end @test_throws ErrorException IOTO.transcription_variable(meas1, tb) @test_throws ErrorException IOTO.transcription_variable(meas2, tb) # test normal - data.measure_mappings[meas1] = [-2 * zero(AffExpr)] - data.measure_support_labels[meas1] = [s1] + + data.measure_mappings[meas1] = fill(-2 * zero(AffExpr)) data.measure_mappings[meas2] = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] - 
data.measure_support_labels[meas2] = [s1, s2] @test IOTO.transcription_variable(meas1, tb) == -2 * zero(AffExpr) expected = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] @test IOTO.transcription_variable(meas2, tb) == [expected[1]] @test IOTO.transcription_variable(meas2, tb, label = All) == expected - # test ndarray - @test IOTO.transcription_variable(meas2, tb, ndarray = true) == [expected[1]] - @test IOTO.transcription_variable(meas2, tb, label = All, ndarray = true) == expected end # test lookup_by_support @testset "lookup_by_support" begin @@ -326,8 +309,9 @@ end @test_throws ErrorException IOTO.lookup_by_support(meas1, tb, Float64[]) @test_throws ErrorException IOTO.lookup_by_support(meas2, tb, [0.]) # test normal + expected = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] data.measure_lookup[meas1] = Dict(Float64[] => 1) - data.measure_lookup[meas2] = Dict{Vector{Float64}, Int}([0] => 1, [1] => 2) + data.measure_lookup[meas2] = Dict([0.] => 1, [1.] => 2) @test IOTO.lookup_by_support(meas1, tb, Float64[]) == -2 * zero(AffExpr) @test IOTO.lookup_by_support(meas2, tb, [1.]) == b^2 + d^2 - 2a end @@ -337,18 +321,15 @@ end dvref = dispatch_variable_ref(meas1) delete!(data.measure_mappings, meas1) @test_throws ErrorException InfiniteOpt.variable_supports(dvref, tb) - data.measure_mappings[meas1] = [-2 * zero(AffExpr)] - # test supports are empty + data.measure_mappings[meas1] = fill(-2 * zero(AffExpr)) + data.measure_supports[meas1] = fill(()) + data.measure_supports[meas2] = [(0.,), (1.,)] + # test normal @test InfiniteOpt.variable_supports(dvref, tb) == () dvref2 = dispatch_variable_ref(meas2) @test InfiniteOpt.variable_supports(dvref2, tb, label = All) == [(0.,), (1.,)] - # test normal @test InfiniteOpt.variable_supports(dvref, tb) == () @test InfiniteOpt.variable_supports(dvref2, tb) == [(0.,)] - # test ndarray - @test InfiniteOpt.variable_supports(dvref, tb, ndarray = true) == [()] - @test InfiniteOpt.variable_supports(dvref2, tb, ndarray = true) == [(0.,)] - @test 
InfiniteOpt.variable_supports(dvref2, tb, label = All, ndarray = true) == [(0.,), (1.,)] end end @@ -360,33 +341,8 @@ end @infinite_parameter(m, par in [0, 1], supports = [0, 1], derivative_method = OrthogonalCollocation(3)) @variable(m, y, Infinite(par)) - d1 = @deriv(y, par) + d1 = deriv(y, par) tb = m.backend - # test _temp_parameter_ref - @testset "_temp_parameter_ref" begin - @test IOTO._temp_parameter_ref(m, index(par)) == dispatch_variable_ref(par) - @test IOTO._temp_parameter_ref(m, index(pars[1]).object_index) == dispatch_variable_ref(pars[1]) - end - # test _collected_supports - @testset "_collected_supports" begin - # independent parameters - expected = Float64[0., 1., NaN] - @test isequal(IOTO._collected_supports(dispatch_variable_ref(par)), expected) - # dependent parameters - expected = sort!([Float64[0., 0.], Float64[1., 1.], Float64[NaN, NaN]]) - @test isequal(sort!(IOTO._collected_supports(dispatch_variable_ref(pars[1]))), expected) - end - # test _collected_support_labels - @testset "_collected_support_labels" begin - # independent parameters - supps = Float64[0., 1., NaN] - expected = [Set([UserDefined]), Set([UserDefined]), Set{DataType}()] - @test isequal(IOTO._collected_support_labels(dispatch_variable_ref(par), supps), expected) - # dependent parameters - supps = [Float64[0., 0.], Float64[1., 1.], Float64[NaN, NaN]] - expected = [Set([UniformGrid]), Set([UniformGrid]), Set{DataType}()] - @test isequal(IOTO._collected_support_labels(dispatch_variable_ref(pars[1]), supps), expected) - end # test set_parameter_supports @testset "set_parameter_supports" begin add_supports(par, 0.6, label = InternalLabel) @@ -421,20 +377,13 @@ end @test IOTO.index_to_support(tb, first(CartesianIndices((1:2, 1:5)))) isa Vector @test isnan(IOTO.index_to_support(tb, last(IOTO.support_index_iterator(tb, [1])))[3]) end - # test index_to_labels - @testset "index_to_labels" begin - idxs = CartesianIndices((1:2, 1:5)) - @test IOTO.index_to_labels(tb, first(idxs)) == 
Set([UserDefined, UniformGrid]) - @test IOTO.index_to_labels(tb, idxs[3]) == Set([UniformGrid, InternalGaussLobatto]) - @test IOTO.index_to_labels(tb, last(IOTO.support_index_iterator(tb, Int[]))) == Set{DataType}() - end end # Test the expression mappings @testset "Expression Queries" begin # initialize tbe needed info m = InfiniteModel() - @infinite_parameter(m, pars[1:2] in [0, 1]) + @infinite_parameter(m, pars[1:2] in [0, 1], supports = [0]) @infinite_parameter(m, par in [0, 1], supports = [0]) @variable(m, x, Infinite(par, pars)) @finite_parameter(m, finpar == 42) @@ -451,21 +400,14 @@ end @variable(tb.model, d) # transcribe the variables and measures data = IOTO.transcription_data(tb) - data.has_internal_supports = true - s1 = Set([UserDefined]) - s2 = Set([InternalLabel]) data.finvar_mappings[y] = a - data.finvar_mappings[x0] = b - data.infvar_mappings[x] = [b, c, d] - data.infvar_support_labels[x] = [s1, s2, s1] - data.measure_mappings[meas1] = [-2 * zero(AffExpr)] - data.measure_support_labels[meas1] = [s1] + data.finvar_mappings[x0] = a + data.infvar_mappings[x] = [a; b;;] + data.measure_mappings[meas1] = fill(-2 * zero(AffExpr)) data.measure_mappings[meas2] = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] - data.measure_support_labels[meas2] = [s1, s2] - lookups = Dict{Vector{Float64}, Int}([0, 0, 0] => 1, [0, 0, 1] => 2, [1, 1, 1] => 3) - data.infvar_lookup[x] = lookups + data.infvar_lookup[x] = Dict([0, 0, 0] => a, [1, 0, 0] => b) data.measure_lookup[meas1] = Dict(Float64[] => 1) - data.measure_lookup[meas2] = Dict{Vector{Float64}, Int}([0] => 1, [1] => 2) + data.measure_lookup[meas2] = Dict([0] => 1, [1] => 2) @test IOTO.set_parameter_supports(tb, m) isa Nothing # test IOTO.transcription_expression in accordance with the methods defined in transcribe.jl @testset "IOTO.transcription_expression (Fallback)" begin @@ -473,28 +415,27 @@ end end # test transcription expression for infinite variables with 3 args @testset "IOTO.transcription_expression (Infinite Variable)" 
begin - @test IOTO.transcription_expression(x, tb, [0., 1., 0.]) == c + @test IOTO.transcription_expression(x, tb, [0., 0., 0.]) == a @test IOTO.transcription_expression(meas1, tb, [0., 0., 1.]) == -2 * zero(AffExpr) - @test IOTO.transcription_expression(f, tb, [0., 1., 0.]) == 1 + @test IOTO.transcription_expression(f, tb, [0., 0., 1.]) == 1 end # test transcription expression for semi_infinite variables with 3 args @testset "IOTO.transcription_expression (Semi-Infinite Variable)" begin # semi_infinite of parameter function rv = add_variable(m, build_variable(error, f, Dict(1=>1.)), add_support = false) - @test IOTO.transcription_expression(rv, tb, [0., 1., 0.]) == 1 + @test IOTO.transcription_expression(rv, tb, [0., 0., 1.]) == 1 # semi_infinite of infinite variable rv = add_variable(m, build_variable(error, x, Dict(1=>1.)), add_support = false) - data.infvar_mappings[rv] = [b, c] - lookups = Dict{Vector{Float64}, Int}([0, 0] => 1, [1, 0] => 2) - data.infvar_lookup[rv] = lookups - @test IOTO.transcription_expression(rv, tb, [1., 0., 0.]) == c + data.infvar_mappings[rv] = [b] + data.infvar_lookup[rv] = Dict([0, 0] => b) + @test IOTO.transcription_expression(rv, tb, [0., 0., 1.]) == b end # test transcription expression for finite variables with 3 args @testset "IOTO.transcription_expression (Finite Variable)" begin - @test IOTO.transcription_expression(x0, tb, [0., 1., 0.]) == b - @test IOTO.transcription_expression(y, tb, [0., 0., 1.]) == a + @test IOTO.transcription_expression(x0, tb, [0., 0., 1.]) == a + @test IOTO.transcription_expression(y, tb, [0., 0., 0.]) == a end # test transcription expression for infinite parameters with 3 args @testset "IOTO.transcription_expression (Infinite Parameter)" begin @@ -507,73 +448,66 @@ end end # test transcription expression for AffExprs with 3 args @testset "IOTO.transcription_expression (AffExpr)" begin - @test IOTO.transcription_expression(x0 - y + 2x - 2.3, tb, [1., 1., 1.]) == b - a + 2d - 2.3 + @test 
IOTO.transcription_expression(x0 - 2y + 2x - 2.3, tb, [0., 0., 1.]) == -a + 2b - 2.3 end # test transcription expression for QuadExprs with 3 args @testset "IOTO.transcription_expression (QuadExpr)" begin # test normal expr = meas2 - 3y^2 - x0 - 2.3 - expected = b^2 + d^2 - 2a - 3a^2 - b - 2.3 - @test IOTO.transcription_expression(expr, tb, [1., 1., 1.]) == expected + expected = b^2 + d^2 - 2a - 3a^2 - a - 2.3 + @test IOTO.transcription_expression(expr, tb, [0., 0., 1.]) == expected # test becomes a nonlinear expression expr = meas2 * x0 - expected = +((-2.0 * a + b * b + d * d) * b, 0.0) - @test isequal(IOTO.transcription_expression(expr, tb, [1., 1., 1.]), expected) + expected = +((-2.0 * a + b * b + d * d) * a, 0.0) + @test isequal(IOTO.transcription_expression(expr, tb, [0., 0., 1.]), expected) end - # test transcription expression for NonlinearExprs with 3 args - @testset "IOTO.transcription_expression (NonlinearExpr)" begin - @test isequal(IOTO.transcription_expression(sin(y), tb, [1., 1., 1.]), sin(a)) + # test transcription expression for GenericNonlinearExprs with 3 args + @testset "IOTO.transcription_expression (GenericNonlinearExpr)" begin + @test isequal(IOTO.transcription_expression(sin(y), tb, [0., 0., 1.]), sin(a)) end # test transcription expression for numbers with 3 args @testset "IOTO.transcription_expression (Real)" begin expected = zero(AffExpr) + 42 - @test IOTO.transcription_expression(42, tb, [1., 1., 1.]) == expected + @test IOTO.transcription_expression(42, tb, [0., 0., 1.]) == expected end # test transcription expression for Exprs with 2 args @testset "IOTO.transcription_expression (Expr 2 Args)" begin # test infinite expr expr = meas2 - 3y^2 - x0 - 2.3 - expected = [-2a^2 + c^2 - 2a - b - 2.3, -3a^2 + b^2 + d^2 - 2a - b - 2.3] + expected = [-2a^2 + c^2 - 2a - a - 2.3, -3a^2 + b^2 + d^2 - 2a - a - 2.3] @test IOTO.transcription_expression(expr, tb, label = All) == expected - @test IOTO.transcription_expression(expr, tb, label = All, 
ndarray = true) == expected - expected = [-2a^2 + c^2 - 2a - b - 2.3] - @test IOTO.transcription_expression(expr, tb) == expected - @test IOTO.transcription_expression(expr, tb, ndarray = true) == expected + @test IOTO.transcription_expression(expr, tb) == expected[1:1] # test finite expr expr = 2x0 -y - expected = 2b- a + expected = 2a- a @test IOTO.transcription_expression(expr, tb) == expected - @test IOTO.transcription_expression(expr, tb, ndarray = true) == [expected] # test NonlinearExpr - @test isequal(IOTO.transcription_expression(sin(x0), tb), sin(b)) + @test isequal(IOTO.transcription_expression(sin(x0), tb), sin(a)) end # test transcription expression for variables with 2 args @testset "IOTO.transcription_expression (Variable 2 Args)" begin - @test IOTO.transcription_expression(x0, tb) == b - @test IOTO.transcription_expression(x, tb) == [b, d] - @test IOTO.transcription_expression(x, tb, label = All) == [b, c, d] + @test IOTO.transcription_expression(x0, tb) == a + @test IOTO.transcription_expression(x, tb) == [a;;] + @test IOTO.transcription_expression(x, tb, label = All) == [a; b;;] end # test transcription expression with 1 argument @testset "IOTO.transcription_expression (1 Arg)" begin - @test IOTO.transcription_expression(x0) == b - @test IOTO.transcription_expression(x0 - y) == b - a + @test IOTO.transcription_expression(x0) == a + @test IOTO.transcription_expression(x0 - 2y) == a - 2a @test IOTO.transcription_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end # test transformation_expression @testset "transformation_expression" begin - @test transformation_expression(x0) == b - @test transformation_expression(x0 - y) == b - a + @test transformation_expression(x0) == a + @test transformation_expression(x0 - 2y) == a - 2a @test transformation_expression(zero(QuadExpr) + 2) == zero(AffExpr) + 2 end # test expression_supports @testset "expression_supports" begin @test supports(x0) == () @test supports(x0 - y) == () - @test supports(x0 - y, 
ndarray = true) == [()] @test supports(meas2 + y) == [(0.,)] - @test supports(meas2 + y, ndarray = true) == [(0.,)] @test supports(meas2 + y, label = All) == [(0.,), (1.,)] - @test supports(meas2 + y, label = All, ndarray = true) == [(0.,), (1.,)] end end @@ -608,23 +542,18 @@ end @test_throws ErrorException IOTO.transcription_constraint(c1, tb) # test normal data.constr_mappings[c1] = [tc1, tc2] - data.constr_support_labels[c1] = [s1, s2] @test IOTO.transcription_constraint(c1, tb, label = All) == [tc1, tc2] @test IOTO.transcription_constraint(c1, tb) == [tc1] - @test IOTO.transcription_constraint(c1, tb, label = All, ndarray = true) == [tc1, tc2] - @test IOTO.transcription_constraint(c1, tb, ndarray = true) == [tc1] + @test IOTO.transcription_constraint(c1, tb, label = All) == [tc1, tc2] # test error @test_throws ErrorException IOTO.transcription_constraint(c2, tb) # test normal - data.constr_mappings[c2] = [tc3] - data.constr_support_labels[c2] = [s1] + data.constr_mappings[c2] = fill(tc3) @test IOTO.transcription_constraint(c2, tb) == tc3 - @test IOTO.transcription_constraint(c2, tb, ndarray = true) == [tc3] # test error @test_throws ErrorException IOTO.transcription_constraint(c3, tb) # test normal - data.constr_mappings[c3] = [tc4] - data.constr_support_labels[c3] = [s1] + data.constr_mappings[c3] = fill(tc4) @test IOTO.transcription_constraint(c3, tb) == tc4 end # test IOTO.transcription_constraint (Single argument) @@ -648,17 +577,13 @@ end data.constr_supports[c1] = [(0.,), (1.,)] @test InfiniteOpt.constraint_supports(c1, tb) == [(0.,)] @test InfiniteOpt.constraint_supports(c1, tb, label = All) == [(0.,), (1.,)] - @test InfiniteOpt.constraint_supports(c1, tb, ndarray = true) == [(0.,)] - @test InfiniteOpt.constraint_supports(c1, tb, label = All, ndarray = true) == [(0.,), (1.,)] # test finite - data.constr_supports[c3] = [()] + data.constr_supports[c3] = fill(()) @test InfiniteOpt.constraint_supports(c3, tb) == () - @test 
InfiniteOpt.constraint_supports(c3, tb, ndarray = true) == [()] end # test supports @testset "supports" begin @test supports(c1, label = All) == [(0.,), (1.,)] - @test supports(c1, label = All, ndarray = true) == [(0.,), (1.,)] @test supports(c1, label = InternalLabel) == [(1.,)] @test supports(c3) == () end diff --git a/test/TranscriptionOpt/transcribe.jl b/test/TranscriptionOpt/transcribe.jl index 8f8728b6..93fc6841 100644 --- a/test/TranscriptionOpt/transcribe.jl +++ b/test/TranscriptionOpt/transcribe.jl @@ -58,19 +58,24 @@ @test isnan(IOTO._format_infinite_info(var, [0.]).start) @test IOTO._format_infinite_info(var, [0.]).integer end + # test name formatting + @testset "_make_var_name" begin + @test IOTO._make_var_name("x", [1, 2, 3], (0, [1, 1]), (1, 2)) == "x(0, [1, 1])" + @test IOTO._make_var_name("x", [1, 2, 3, 4, 5], (0, [0, 0, 0, 0]), (1, 2)) == "x[1, 2]" + end # test transcribe_infinite_variables! @testset "transcribe_infinite_variables!" begin @test isa(IOTO.transcribe_infinite_variables!(tb, m), Nothing) @test length(IOTO.transcription_data(tb).infvar_mappings) == 2 @test IOTO.transcription_variable(x, tb) isa Vector{VariableRef} - @test IOTO.transcription_variable(y, tb) isa Vector{VariableRef} - @test name(IOTO.transcription_variable(x, tb)[1]) == "x(support: 1)" - @test name(IOTO.transcription_variable(y, tb)[3]) == "y(support: 3)" + @test IOTO.transcription_variable(y, tb) isa Matrix{VariableRef} + @test name(IOTO.transcription_variable(x, tb)[1]) == "x(0.0)" + @test name(IOTO.transcription_variable(y, tb)[2, 1]) in ["y(1.0, [0.0, 0.0])", "y(1.0, [1.0, 1.0])"] @test has_lower_bound(IOTO.transcription_variable(x)[1]) @test is_binary(IOTO.transcription_variable(y, tb)[2]) @test is_fixed(IOTO.transcription_variable(y, tb)[4]) @test is_integer(IOTO.transcription_variable(x, tb)[2]) - @test sort!(start_value.(IOTO.transcription_variable(y, tb))) == [0., 1, 2, 3] + @test sort!(vec(start_value.(IOTO.transcription_variable(y, tb)))) == [0., 1, 2, 3] @test 
supports(x) == [(0,), (1,)] @test length(supports(y)) == 4 end @@ -99,14 +104,15 @@ @test length(IOTO.transcription_data(tb).infvar_mappings) == 6 @test num_derivatives(m) == 4 @test IOTO.transcription_variable(dx, tb) isa Vector{VariableRef} - @test IOTO.transcription_variable(dy, tb) isa Vector{VariableRef} + @test IOTO.transcription_variable(dy, tb) isa Matrix{VariableRef} @test IOTO.transcription_variable(dx3, tb) isa Vector{VariableRef} - @test name(IOTO.transcription_variable(dx, tb)[1]) == "d/dpar[x(par)](support: 1)" - @test name(IOTO.transcription_variable(dx3, tb)[1]) == "d^3/dpar^3[x(par)](support: 1)" - @test name(IOTO.transcription_variable(deriv(dx, par), tb)[1]) == "d²/dpar²[x(par)](support: 1)" - @test name(IOTO.transcription_variable(dy, tb)[3]) == (Sys.iswindows() ? "d/dpar[y(par, pars)](support: 3)" : "∂/∂par[y(par, pars)](support: 3)") + @test name(IOTO.transcription_variable(dx, tb)[1]) == "d/dpar[x(par)](0.0)" + @test name(IOTO.transcription_variable(dx3, tb)[1]) == "d^3/dpar^3[x(par)](0.0)" + @test name(IOTO.transcription_variable(deriv(dx, par), tb)[1]) == "d²/dpar²[x(par)](0.0)" + possible = [Sys.iswindows() ? 
"d/dpar[y(par, pars)](1.0, [$i, $i])" : "∂/∂par[y(par, pars)](1.0, [$i, $i])" for i in [0.0, 1.0]] + @test name(IOTO.transcription_variable(dy, tb)[2, 1]) in possible @test has_lower_bound(IOTO.transcription_variable(dx, tb)[1]) - @test sort!(start_value.(IOTO.transcription_variable(dy, tb))) == [0., 1, 2, 3] + @test sort!(vec(start_value.(IOTO.transcription_variable(dy, tb)))) == [0., 1, 2, 3] @test supports(dx) == [(0,), (1,)] @test length(supports(dy)) == 4 end @@ -170,8 +176,8 @@ @test length(IOTO.transcription_data(tb).finvar_mappings) == 4 @test IOTO.transcription_variable(x0, tb) == IOTO.lookup_by_support(x, tb, [0.]) @test IOTO.transcription_variable(y0, tb) == IOTO.lookup_by_support(y, tb, [0., 0., 0.]) - @test name(IOTO.transcription_variable(x0, tb)) == "x(support: 1)" - @test name(IOTO.transcription_variable(y0, tb))[1:end-2] == "y(support: " + @test name(IOTO.transcription_variable(x0, tb)) == "x(0.0)" + @test name(IOTO.transcription_variable(y0, tb))[1:8] == "y(0.0, [" @test lower_bound(IOTO.transcription_variable(x0, tb)) == 0 @test is_integer(IOTO.transcription_variable(x0, tb)) @test lower_bound(IOTO.transcription_variable(y0, tb)) == 0 @@ -355,7 +361,7 @@ end @test IOTO.transcription_constraint(UpperBoundRef(yf)) == UpperBoundRef(yft) @test IOTO.transcription_constraint(BinaryRef(z)) == BinaryRef(zt) # test constraint transcriptions - @test IOTO.transcription_constraint(c1) isa Vector{ConstraintRef} + @test IOTO.transcription_constraint(c1) isa Matrix{ConstraintRef} @test length(IOTO.transcription_constraint(c1)) == 6 @test constraint_object(IOTO.transcription_constraint(c2)).func == yt[1] - zt^2 xf = IOTO.lookup_by_support(x, tb, [1., 1., 1.]) @@ -378,13 +384,13 @@ end @test supports(BinaryRef(z)) == () # test the constraint supports expected = [([0., 0.], 0.), ([0., 0.], 0.5), ([0., 0.], 1.), ([1., 1.], 0.), ([1., 1.], 0.5), ([1., 1.], 1.)] - @test sort(supports(c1)) == expected + @test sort(vec(supports(c1))) == expected @test supports(c2) == 
(0.,) @test supports(c3) == ([1., 1.], 1.) @test supports(c4) == [(0.0,), (0.5,)] @test supports(c5) == () - @test sort(supports(c6)) == expected - @test sort(supports(c7)) == expected + @test sort(vec(supports(c6))) == expected + @test sort(vec(supports(c7))) == expected @test supports(c8) == () end end @@ -429,7 +435,7 @@ end # main test @test IOTO.transcribe_variable_collocation_restictions!(tb, m) isa Nothing @test num_constraints(tb.model, count_variable_in_set_constraints = false) == 3 * 3 - yt = IOTO.transcription_variable(y, label = All, ndarray = true) + yt = IOTO.transcription_variable(y, label = All) cons = all_constraints(tb.model, include_variable_in_set_constraints = false) @test jump_function(constraint_object(first(cons))) == yt[7, 1] - yt[6, 1] # test assertion error @@ -498,9 +504,9 @@ end @test start_value(wt) == 1. # test infinite variables @test IOTO.transcription_variable(x) isa Vector{VariableRef} - @test IOTO.transcription_variable(y) isa Vector{VariableRef} - @test name(IOTO.transcription_variable(x)[1]) == "x(support: 1)" - @test name(IOTO.transcription_variable(y)[3]) == "y(support: 3)" + @test IOTO.transcription_variable(y) isa Matrix{VariableRef} + @test name(IOTO.transcription_variable(x)[1]) == "x(0.0)" + @test name(IOTO.transcription_variable(y)[3])[1:8] == "y(0.0, [" @test has_lower_bound(IOTO.transcription_variable(x)[1]) @test is_binary(IOTO.transcription_variable(y)[2]) @test is_fixed(IOTO.transcription_variable(y)[4]) @@ -511,8 +517,8 @@ end # test point variables @test IOTO.transcription_variable(x0) isa VariableRef @test IOTO.transcription_variable(y0) isa VariableRef - @test name(IOTO.transcription_variable(x0)) == "x(support: 1)" - @test name(IOTO.transcription_variable(y0))[1:end-2] == "y(support: " + @test name(IOTO.transcription_variable(x0)) == "x(0.0)" + @test name(IOTO.transcription_variable(y0))[1:8] == "y(0.0, [" @test has_lower_bound(IOTO.transcription_variable(x0)) @test is_integer(IOTO.transcription_variable(x0)) 
@test has_lower_bound(IOTO.transcription_variable(y0)) @@ -544,8 +550,8 @@ end @test constraint_object(IOTO.transcription_constraint(c3)).func == xt[1] - 2wt + xt[2] + zt - d2t[1] - d2t[2] @test constraint_object(IOTO.transcription_constraint(c6)).func == [zt, wt] @test IOTO.transcription_constraint(c5) isa Vector{ConstraintRef} - @test name(IOTO.transcription_constraint(c2)) == "c2(support: 1)" - @test name(IOTO.transcription_constraint(c1)) == "c1(support: 1)" + @test name(IOTO.transcription_constraint(c2)) == "c2" + @test name(IOTO.transcription_constraint(c1)) == "c1[1, 1]" @test supports(c1) == (0., [0., 0.]) @test IOTO.transcription_constraint(c7) isa ConstraintRef @test isequal(constraint_object(IOTO.transcription_constraint(c7)).func, gr(zt) - 2.) @@ -572,6 +578,7 @@ end m = InfiniteModel() @variable(m, y >= 0) @objective(m, Min, y) + @constraint(m, y^2 <= 42) tb = m.backend @test IOTO.build_transcription_backend!(tb, m) isa Nothing @test IOTO.transcription_variable(y) isa VariableRef diff --git a/test/backend_mappings.jl b/test/backend_mappings.jl index 46eb6fbb..92417f82 100644 --- a/test/backend_mappings.jl +++ b/test/backend_mappings.jl @@ -63,7 +63,6 @@ end @testset "transformation_variable" begin # test normal usage @test transformation_variable(x, label = All) == IOTO.transcription_variable(x, label = All) - @test transformation_variable(x, label = All, ndarray = true) == IOTO.transcription_variable(x, label = All, ndarray = true) @test transformation_variable(x0) == IOTO.transcription_variable(x0) @test transformation_variable(z) == IOTO.transcription_variable(z) @test transformation_variable(d1, label = InternalLabel) == IOTO.transcription_variable(d1, label = InternalLabel) @@ -85,7 +84,6 @@ end @testset "supports (Variables)" begin # test normal usage @test supports(x) == [(0.,), (1.,)] - @test supports(x, ndarray = true) == [(0.,), (1.,)] @test supports(x, label = All) == [(0.,), (0.5,), (1.,)] @test supports(meas1) == () @test supports(d1, 
label = InternalLabel) == [(0.5,)] @@ -97,7 +95,6 @@ end @test transformation_expression(x, label = All) == IOTO.transcription_variable(x, label = All) @test transformation_expression(z) == IOTO.transcription_variable(z) @test transformation_expression(x0) == IOTO.transcription_variable(x0) - @test transformation_expression(x0, ndarray = true) == IOTO.transcription_variable(x0) # test expression without variables expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42 @test transformation_expression(expr) == zero(AffExpr) + 42 @@ -105,7 +102,6 @@ end xt = IOTO.transcription_variable(x, label = All) zt = IOTO.transcription_variable(z) @test transformation_expression(x^2 + z) == [xt[1]^2 + zt, xt[3]^2 + zt] - @test transformation_expression(x^2 + z, ndarray = true) == [xt[1]^2 + zt, xt[3]^2 + zt] @test transformation_expression(x^2 + z, label = All) == [xt[1]^2 + zt, xt[2]^2 + zt, xt[3]^2 + zt] @test transformation_expression(2z - 3) == 2zt - 3 @test transformation_expression(2 * f) == [zero(AffExpr), zero(AffExpr) + sin(1) * 2] @@ -128,7 +124,6 @@ end # test normal usage @test supports(x) == [(0.,), (1.,)] @test supports(2x - z + x0 + 43) == [(0.,), (1.,)] - @test supports(2x - z + x0 + 43, ndarray = true) == [(0.,), (1.,)] expr = zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) + 42 @test supports(expr, label = All) == () end @@ -137,7 +132,6 @@ end # test normal usage @test transformation_constraint(c1) == IOTO.transcription_constraint(c1) @test transformation_constraint(c2, label = All) == IOTO.transcription_constraint(c2, label = All) - @test transformation_constraint(c2, label = All, ndarray = true) == IOTO.transcription_constraint(c2, label = All, ndarray = true) @test transformation_constraint(c3) == IOTO.transcription_constraint(c3) # test deprecation @test (@test_deprecated optimizer_model_constraint(c1)) == transformation_constraint(c1) @@ -149,7 +143,6 @@ end # test normal usage @test InfiniteOpt.constraint_supports(c1, tb) == [(0.,), 
(1.,)] @test InfiniteOpt.constraint_supports(c1, tb, label = All) == [(0.,), (0.5,), (1.,)] - @test InfiniteOpt.constraint_supports(c1, tb, label = All, ndarray = true) == [(0.,), (0.5,), (1.,)] # test fallback @test_throws ErrorException InfiniteOpt.constraint_supports(c1, TestBackend()) end diff --git a/test/results.jl b/test/results.jl index cf3633db..e33aaecb 100644 --- a/test/results.jl +++ b/test/results.jl @@ -234,7 +234,6 @@ end @testset "JuMP.value" begin @test value(inf) == [2., 2.] @test value(inf, label = All) == [2., 1., 2.] - @test value(inf, label = All, ndarray = true) == [2., 1., 2.] @test value(d1) == [2., 2.] @test value(d1, label = All) == [2., 1., 2.] @test value(g) == 1. @@ -322,7 +321,7 @@ end # test map_value @testset "map_value" begin @test InfiniteOpt.map_value(meas1, tb) == 4. - @test InfiniteOpt.map_value(meas2, tb) == [0., -3.] + @test InfiniteOpt.map_value(meas2, tb) == [2., -5.] @test InfiniteOpt.map_value(3g - 1, tb) == 2. @test InfiniteOpt.map_value(inf^2 + g, tb) == [5., 1.] @test InfiniteOpt.map_value(zero(AffExpr) + 1, tb) == 1. @@ -330,10 +329,9 @@ end # test value @testset "JuMP.value" begin @test value(meas1, label = All) == 4. - @test value(meas2, label = UserDefined) == [0., -3.] + @test value(meas2, label = UserDefined) == [2., -5.] @test value(3g - 1) == 2. @test value(inf * inf + g - 2) == [3., -1.] - @test value(inf * inf + g - 2, ndarray = true) == [3., -1.] @test value(zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}) - 42) == -42. @test value(sin(g)) == sin(1) @test value(GenericNonlinearExpr{GeneralVariableRef}(:sin, Any[0])) == 0 @@ -396,7 +394,6 @@ end @testset "JuMP.value" begin @test value(c1) == 1. @test value(c2, label = UserDefined) == [-1., 0.] - @test value(c2, label = UserDefined, ndarray = true) == [-1., 0.] 
@test value(c3) == sin(1) @test value(c4) == [sin(-1), sin(0)] end @@ -410,7 +407,6 @@ end @testset "JuMP.optimizer_index" begin @test isa(optimizer_index(c1), MOI.ConstraintIndex) @test isa(optimizer_index(c2, label = All), Vector{<:MOI.ConstraintIndex}) - @test isa(optimizer_index(c2, label = All, ndarray = true), Vector{<:MOI.ConstraintIndex}) @test isa(optimizer_index(c3), MOI.ConstraintIndex) @test isa(optimizer_index(c4, label = All), Vector{<:MOI.ConstraintIndex}) end @@ -428,14 +424,12 @@ end @testset "JuMP.dual" begin @test dual(c1) == -1. @test dual(c2, label = UserDefined) == [0., 1.] - @test dual(c2, label = UserDefined, ndarray = true) == [0., 1.] @test dual(c3) == 4 @test dual(c4) == [2, 3] end # test shadow_price @testset "JuMP.shadow_price" begin @test shadow_price(c1) == -1. - @test shadow_price(c1, ndarray = true) == [-1.] @test shadow_price(c2, label = PublicLabel) == [-0., -1.] @test shadow_price(c3) == -4 @test shadow_price(c4) == [-2, -3] @@ -503,11 +497,9 @@ end # test constraint queries @test lp_sensitivity_report(m)[c1] == (-Inf, 0) @test lp_sensitivity_report(m)[c2, label = All] == [(-Inf, 0), (-Inf, 0)] - @test lp_sensitivity_report(m)[c2, ndarray = true] == [(-Inf, 0), (-Inf, 0)] # test variable queries @test lp_sensitivity_report(m)[g] == (0, 0) @test lp_sensitivity_report(m)[inf, label = UserDefined] == [(0, 0), (0, 0)] - @test lp_sensitivity_report(m)[inf, ndarray = true] == [(0, 0), (0, 0)] # test model not up to date set_objective_sense(m, MOI.MIN_SENSE) @testset "Not up-to-date" begin From cb76098d6403d06fa972bdf5e167193b439f2a33 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 29 Jul 2024 11:41:55 -0400 Subject: [PATCH 3/6] minor cleanup --- src/TranscriptionOpt/model.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/TranscriptionOpt/model.jl b/src/TranscriptionOpt/model.jl index 1e6ff4f2..49d6581a 100644 --- a/src/TranscriptionOpt/model.jl +++ b/src/TranscriptionOpt/model.jl @@ -247,7 +247,6 @@ 
function _truncate_by_label( label, ::Nothing ) where {T, N} - # TODO carefully revise the logic behind the interesection of different axes return arr[(map(s -> any(l -> l <: label, s), sets) for sets in labels)...] end @@ -715,8 +714,7 @@ end InfiniteOpt.transformation_expression( expr::JuMP.AbstractJuMPScalar, backend::TranscriptionBackend; - [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel, - ndarray::Bool = false] + [label::Type{<:InfiniteOpt.AbstractSupportLabel} = InfiniteOpt.PublicLabel] ) Proper extension of [`InfiniteOpt.transformation_expression`](@ref) for From 334e05806125f5e55b33e5c67e8f15f60dcad6c3 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 29 Jul 2024 13:29:51 -0400 Subject: [PATCH 4/6] test fixes --- docs/src/guide/measure.md | 26 +++++++++++++------------- docs/src/guide/transcribe.md | 1 + test/TranscriptionOpt/model.jl | 4 ++-- test/TranscriptionOpt/transcribe.jl | 2 +- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/docs/src/guide/measure.md b/docs/src/guide/measure.md index 4bd05614..36b22365 100644 --- a/docs/src/guide/measure.md +++ b/docs/src/guide/measure.md @@ -364,12 +364,12 @@ julia> supports(t) julia> transformation_variable(u) 3-element Vector{VariableRef}: - u(support: 1) - u(support: 2) - u(support: 3) + u(0.0) + u(1.0) + u(2.0) julia> objective_function(tmodel) -0.5 u(support: 1)² + u(support: 2)² + 0.5 u(support: 3)² +0.5 u(0.0)² + u(1.0)² + 0.5 u(2.0)² ``` Thus, the integral incorporates the 3 supports generated outside the `integral` declaration. 
@@ -405,14 +405,14 @@ julia> supports(t) julia> transformation_variable(u) 5-element Vector{VariableRef}: - u(support: 1) - u(support: 2) - u(support: 3) - u(support: 4) - u(support: 5) + u(0.0) + u(0.42264973081) + u(1.0) + u(1.57735026919) + u(2.0) julia> objective_function(tmodel) -u(support: 2)² + u(support: 4)² +u(0.42264973081)² + u(1.57735026919)² ``` The supports used in the objective function are different from the supports used in the transcription of `u`. The integral objective function has been transcribed @@ -459,11 +459,11 @@ julia> supports(t) julia> transformation_variable(u) 2-element Vector{VariableRef}: - u(support: 1) - u(support: 2) + u(0.42264973081) + u(1.57735026919) julia> objective_function(tmodel) -u(support: 1)² + u(support: 2)² +u(0.42264973081)² + u(1.57735026919)² ``` Therefore, using quadratures other than `UniTrapezoid()` or `FEGaussLobatto()` requires careful analysis if there are user-defined supports in the problem. diff --git a/docs/src/guide/transcribe.md b/docs/src/guide/transcribe.md index caeae42b..c5e836b1 100644 --- a/docs/src/guide/transcribe.md +++ b/docs/src/guide/transcribe.md @@ -90,6 +90,7 @@ Subject to y(5.0) ≥ 0 y(10.0) ≥ 0 z binary +``` Thus, we have a transcribed `JuMP` model. 
To be precise, data on the mapping between the transcribed variables/constraints and their infinite counterparts is also generated diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index 1fbf598c..b904d74a 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -67,7 +67,7 @@ end l2 = ([Set([UserDefined]), Set([UserDefined])], [Set([UserDefined]), Set([PublicLabel])]) @test IOTO._truncate_by_label(a2, l2, PublicLabel, nothing) == a2 - @test IOTO._truncate_by_label(a2, l2, UserDefined, nothing) == [1; 3;;] + @test IOTO._truncate_by_label(a2, l2, UserDefined, nothing) == a2[:, [1]] # vector w/ valid indices a1 = [1, 3] l1 = ([Set([UserDefined]), Set([UserDefined]), Set([PublicLabel]), Set{DataType}()], ) @@ -402,7 +402,7 @@ end data = IOTO.transcription_data(tb) data.finvar_mappings[y] = a data.finvar_mappings[x0] = a - data.infvar_mappings[x] = [a; b;;] + data.infvar_mappings[x] = reshape([a, b], :, 1) data.measure_mappings[meas1] = fill(-2 * zero(AffExpr)) data.measure_mappings[meas2] = [a^2 + c^2 - 2a, b^2 + d^2 - 2a] data.infvar_lookup[x] = Dict([0, 0, 0] => a, [1, 0, 0] => b) diff --git a/test/TranscriptionOpt/transcribe.jl b/test/TranscriptionOpt/transcribe.jl index 93fc6841..0e56dc1e 100644 --- a/test/TranscriptionOpt/transcribe.jl +++ b/test/TranscriptionOpt/transcribe.jl @@ -551,7 +551,7 @@ end @test constraint_object(IOTO.transcription_constraint(c6)).func == [zt, wt] @test IOTO.transcription_constraint(c5) isa Vector{ConstraintRef} @test name(IOTO.transcription_constraint(c2)) == "c2" - @test name(IOTO.transcription_constraint(c1)) == "c1[1, 1]" + @test name(IOTO.transcription_constraint(c1)) in ["c1[1, 1]", "c1[1, 2]"] @test supports(c1) == (0., [0., 0.]) @test IOTO.transcription_constraint(c7) isa ConstraintRef @test isequal(constraint_object(IOTO.transcription_constraint(c7)).func, gr(zt) - 2.) 
From cee059cdf4be192cc5c0efaa750f4cd35de0261a Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 29 Jul 2024 13:40:33 -0400 Subject: [PATCH 5/6] more fixes --- docs/src/guide/transcribe.md | 1 + test/TranscriptionOpt/model.jl | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/src/guide/transcribe.md b/docs/src/guide/transcribe.md index c5e836b1..51793e2a 100644 --- a/docs/src/guide/transcribe.md +++ b/docs/src/guide/transcribe.md @@ -108,6 +108,7 @@ except the initial condition which naturally is only invoked for the first suppo point. Furthermore, the transcription variable(s) of any variable associated with the infinite model can be determined via [`transformation_variable`](@ref): ```jldoctest transcribe +julia> transformation_variable(y) 3-element Vector{VariableRef}: y(0.0) y(5.0) diff --git a/test/TranscriptionOpt/model.jl b/test/TranscriptionOpt/model.jl index b904d74a..d33d234b 100644 --- a/test/TranscriptionOpt/model.jl +++ b/test/TranscriptionOpt/model.jl @@ -487,8 +487,8 @@ end # test transcription expression for variables with 2 args @testset "IOTO.transcription_expression (Variable 2 Args)" begin @test IOTO.transcription_expression(x0, tb) == a - @test IOTO.transcription_expression(x, tb) == [a;;] - @test IOTO.transcription_expression(x, tb, label = All) == [a; b;;] + @test IOTO.transcription_expression(x, tb) == reshape([a], :, 1) + @test IOTO.transcription_expression(x, tb, label = All) == reshape([a, b], :, 1) end # test transcription expression with 1 argument @testset "IOTO.transcription_expression (1 Arg)" begin From f9b4600fec1f1b97a6e00b3d4a79b338d5c05931 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 29 Jul 2024 13:52:42 -0400 Subject: [PATCH 6/6] remove old docstrings --- docs/src/manual/transcribe.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/src/manual/transcribe.md b/docs/src/manual/transcribe.md index 9b833b52..b401dd50 100644 --- a/docs/src/manual/transcribe.md +++ 
b/docs/src/manual/transcribe.md @@ -47,6 +47,4 @@ InfiniteOpt.TranscriptionOpt.parameter_supports(::InfiniteOpt.TranscriptionOpt.T ```@docs InfiniteOpt.TranscriptionOpt.support_index_iterator InfiniteOpt.TranscriptionOpt.index_to_support -InfiniteOpt.TranscriptionOpt.index_to_labels -InfiniteOpt.TranscriptionOpt.make_ndarray ``` \ No newline at end of file