From 8382cec59dbc00a3960818ab14aa403591a0effb Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 17 Dec 2025 13:38:56 +1300 Subject: [PATCH 1/6] [TambyVanderpooten] improve performance based on profiling --- src/algorithms/TambyVanderpooten.jl | 127 +++++++++++++++++++--------- 1 file changed, 89 insertions(+), 38 deletions(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index 1d4fdbd..6bf6e16 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -41,7 +41,7 @@ function _update_search_region( bounds_to_remove = Vector{Float64}[] bounds_to_add = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}() for u in keys(U_N) - if all(y .< u) + if _is_less(y, u, 0) # k=0 here because we want to check every element push!(bounds_to_remove, u) for l in 1:p u_l = _get_child(u, y, l) @@ -55,7 +55,7 @@ function _update_search_region( end else for k in 1:p - if (y[k] == u[k]) && all(_project(y, k) .< _project(u, k)) + if _is_less(y, u, k) push!(U_N[u][k], y) end end @@ -68,6 +68,21 @@ function _update_search_region( return end +function _is_less(y, u, k) + for i in 1:length(y) + if i == k + if !(y[i] == u[i]) + return false + end + else + if !(y[i] < u[i]) + return false + end + end + end + return true +end + function _get_child(u::Vector{Float64}, y::Vector{Float64}, k::Int) @assert length(u) == length(y) return vcat(u[1:(k-1)], y[k], u[(k+1):length(y)]) @@ -80,52 +95,78 @@ function _select_search_zone( ) upper_bounds = collect(keys(U_N)) p = length(yI) - hvs = [ - u[k] == yN[k] ? 0.0 : prod(_project(u, k) .- _project(yI, k)) for - k in 1:p, u in upper_bounds - ] - k_star, j_star = argmax(hvs).I - return k_star, upper_bounds[j_star] + k_star, u_star, v_star = 1, first(upper_bounds), 0.0 + for k in 1:p + for u in upper_bounds + if u[k] != yN[k] + v = 1.0 + for i in 1:p + if i != k + v *= u[i] - yI[i] + end + end + if v > v_star + k_star, u_star, v_star = k, u, v + end + end + end + end + return k_star, u_star end function minimize_multiobjective!( algorithm::TambyVanderpooten, model::Optimizer, ) - @assert MOI.get(model.inner, MOI.ObjectiveSense()) == MOI.MIN_SENSE - warm_start_supported = false - if MOI.supports(model, MOI.VariablePrimalStart(), MOI.VariableIndex) - warm_start_supported = true - end solutions = Dict{Vector{Float64},Dict{MOI.VariableIndex,Float64}}() - variables = MOI.get(model.inner, MOI.ListOfVariableIndices()) - n = MOI.output_dimension(model.f) + status = _minimize_multiobjective!( + algorithm, + model, + model.inner, + model.f, + solutions, + )::MOI.TerminationStatusCode + return status, SolutionPoint[SolutionPoint(X, Y) for (Y, X) in solutions] +end + +function _minimize_multiobjective!( + algorithm::TambyVanderpooten, + model::Optimizer, + inner::MOI.ModelLike, + f::MOI.AbstractVectorFunction, + solutions::Dict{Vector{Float64},Dict{MOI.VariableIndex,Float64}}, +) + @assert MOI.get(inner, MOI.ObjectiveSense()) == MOI.MIN_SENSE + warm_start_supported = + MOI.supports(inner, MOI.VariablePrimalStart(), MOI.VariableIndex) + variables = MOI.get(inner, MOI.ListOfVariableIndices()) + n = MOI.output_dimension(f) yI, yN = zeros(n), zeros(n) - scalars = MOI.Utilities.scalarize(model.f) + scalars = MOI.Utilities.scalarize(f) for (i, f_i) in enumerate(scalars) - MOI.set(model.inner, MOI.ObjectiveFunction{typeof(f_i)}(), f_i) - MOI.set(model.inner, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.set(inner, MOI.ObjectiveFunction{typeof(f_i)}(), f_i) + MOI.set(inner, MOI.ObjectiveSense(), MOI.MIN_SENSE) 
optimize_inner!(model) - status = MOI.get(model.inner, MOI.TerminationStatus()) + status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) - return status, nothing + return status end _, Y = _compute_point(model, variables, f_i) _log_subproblem_solve(model, variables) yI[i] = Y model.ideal_point[i] = Y - MOI.set(model.inner, MOI.ObjectiveSense(), MOI.MAX_SENSE) + MOI.set(inner, MOI.ObjectiveSense(), MOI.MAX_SENSE) optimize_inner!(model) - status = MOI.get(model.inner, MOI.TerminationStatus()) + status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) _warn_on_nonfinite_anti_ideal(algorithm, MOI.MIN_SENSE, i) - return status, nothing + return status end _, Y = _compute_point(model, variables, f_i) _log_subproblem_solve(model, variables) yN[i] = Y + 1 end - MOI.set(model.inner, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.set(inner, MOI.ObjectiveSense(), MOI.MIN_SENSE) U_N = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}() V = [Tuple{Vector{Float64},Vector{Float64}}[] for k in 1:n] U_N[yN] = [[_get_child(yN, yI, k)] for k in 1:n] @@ -137,7 +178,7 @@ function minimize_multiobjective!( end k, u = _select_search_zone(U_N, yI, yN) MOI.set( - model.inner, + inner, MOI.ObjectiveFunction{typeof(scalars[k])}(), scalars[k], ) @@ -145,7 +186,7 @@ function minimize_multiobjective!( for (i, f_i) in enumerate(scalars) if i != k ci = MOI.Utilities.normalize_and_add_constraint( - model.inner, + inner, f_i, MOI.LessThan{Float64}(u[i] - 1), ) @@ -157,7 +198,7 @@ function minimize_multiobjective!( variables_start = solutions[first(U_N[u][k])] for x_i in variables MOI.set( - model.inner, + inner, MOI.VariablePrimalStart(), x_i, variables_start[x_i], @@ -167,27 +208,27 @@ function minimize_multiobjective!( end optimize_inner!(model) _log_subproblem_solve(model, "auxillary subproblem") - status = MOI.get(model.inner, MOI.TerminationStatus()) + status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) MOI.delete.(model, ε_constraints) - return status, nothing + return status end - y_k = MOI.get(model.inner, MOI.ObjectiveValue()) + y_k = MOI.get(inner, MOI.ObjectiveValue())::Float64 sum_f = sum(1.0 * s for s in scalars) - MOI.set(model.inner, MOI.ObjectiveFunction{typeof(sum_f)}(), sum_f) + MOI.set(inner, MOI.ObjectiveFunction{typeof(sum_f)}(), sum_f) y_k_constraint = MOI.Utilities.normalize_and_add_constraint( - model.inner, + inner, scalars[k], MOI.EqualTo(y_k), ) optimize_inner!(model) - status = MOI.get(model.inner, MOI.TerminationStatus()) + status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) MOI.delete.(model, ε_constraints) MOI.delete(model, y_k_constraint) - return status, nothing + return status end - X, Y = _compute_point(model, variables, model.f) + X, Y = _compute_point(model, variables, f) _log_subproblem_solve(model, Y) MOI.delete.(model, ε_constraints) MOI.delete(model, y_k_constraint) @@ -205,8 +246,9 @@ function minimize_multiobjective!( push!(bounds_to_remove, u_i) else for (u_j, y_j) in V[k] - if all(_project(u_i, k) .<= _project(u_j, k)) && - isapprox(y_j[k], u_i[k]; atol = 1e-6) + if isapprox(y_j[k], u_i[k]; atol = 1e-6) && + _is_less_eq(u_i, u_j, k) + push!(bounds_to_remove, u_i) end end @@ -219,5 +261,14 @@ function minimize_multiobjective!( end end end - return status, [SolutionPoint(X, Y) for (Y, X) in solutions] + return status +end + +function _is_less_eq(y, u, k) + for i in 1:length(y) + if i != k && !(y[i] <= u[i]) + return false + end + end + return true end 
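PATCH 1's central change is replacing the broadcast comparisons `all(y .< u)` and `(y[k] == u[k]) && all(_project(y, k) .< _project(u, k))` with the hand-written `_is_less` loop, and similarly replacing the `hvs` matrix comprehension in `_select_search_zone` with explicit loops: each broadcast builds a temporary array per call, and these comparisons sit on the hot path of `_update_search_region`. The following is a minimal standalone sketch of the idea, not code from the patch; `is_less_sketch` and `is_less_broadcast` are made-up names for illustration.

    # Allocation-free comparison with an early exit, mirroring `_is_less`:
    # with k == 0 every coordinate must satisfy y[i] < u[i]; with k >= 1 the
    # k-th coordinate must instead be exactly equal.
    function is_less_sketch(y::Vector{Float64}, u::Vector{Float64}, k::Int)
        for i in 1:length(y)
            if i == k
                y[i] == u[i] || return false
            else
                y[i] < u[i] || return false
            end
        end
        return true
    end

    # The broadcast version it replaces allocates a temporary BitVector on
    # every call before `all` reduces it.
    is_less_broadcast(y, u) = all(y .< u)

    y, u = rand(3), rand(3) .+ 1.0
    @assert is_less_sketch(y, u, 0) == is_less_broadcast(y, u)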
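The patch also splits `minimize_multiobjective!` into a thin wrapper plus `_minimize_multiobjective!`, which receives `model.inner` and `model.f` as arguments. This is the standard Julia function-barrier pattern: the wrapper works with abstractly typed fields, but the inner function compiles a specialized method for the concrete types it is called with. A minimal sketch of the pattern under made-up names (`Outer`, `slow_sum`, `fast_sum`), unrelated to the actual `Optimizer` struct:

    struct Outer
        data::Vector   # abstractly typed field, playing the role of `model.f`
    end

    # Works directly on the abstractly typed field: the element type is only
    # known at run time, so every iteration dynamically dispatches.
    slow_sum(o::Outer) = sum(x * x for x in o.data)

    # Function barrier: pass the field to an inner function, which Julia
    # specializes on the concrete Vector{T} it receives.
    fast_sum(o::Outer) = _fast_sum(o.data)
    _fast_sum(data::Vector{T}) where {T} = sum(x * x for x in data)

    o = Outer(collect(1.0:1000.0))
    @assert slow_sum(o) == fast_sum(o)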
From 99c521b69422d0b9fe17c02260656ae93abe0869 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 17 Dec 2025 13:44:42 +1300 Subject: [PATCH 2/6] Update formatting --- src/algorithms/TambyVanderpooten.jl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index 6bf6e16..e1577f5 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -177,11 +177,7 @@ function _minimize_multiobjective!( break end k, u = _select_search_zone(U_N, yI, yN) - MOI.set( - inner, - MOI.ObjectiveFunction{typeof(scalars[k])}(), - scalars[k], - ) + MOI.set(inner, MOI.ObjectiveFunction{typeof(scalars[k])}(), scalars[k]) ε_constraints = Any[] for (i, f_i) in enumerate(scalars) if i != k @@ -248,7 +244,6 @@ function _minimize_multiobjective!( for (u_j, y_j) in V[k] if isapprox(y_j[k], u_i[k]; atol = 1e-6) && _is_less_eq(u_i, u_j, k) - push!(bounds_to_remove, u_i) end end From a0281576913e754a28aaa67c845d838879a1673b Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Thu, 18 Dec 2025 14:51:04 +1300 Subject: [PATCH 3/6] Update --- src/algorithms/TambyVanderpooten.jl | 205 ++++++++++++++++------------ 1 file changed, 120 insertions(+), 85 deletions(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index e1577f5..3b2952a 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -32,6 +32,7 @@ This algorithm is restricted to problems with: """ struct TambyVanderpooten <: AbstractAlgorithm end +# This is Algorithm 1 from the paper. function _update_search_region( U_N::Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}, y::Vector{Float64}, @@ -40,14 +41,14 @@ function _update_search_region( p = length(y) bounds_to_remove = Vector{Float64}[] bounds_to_add = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}() - for u in keys(U_N) + for (u, u_n) in U_N if _is_less(y, u, 0) # k=0 here because we want to check every element push!(bounds_to_remove, u) for l in 1:p u_l = _get_child(u, y, l) N = [ - k == l ? [y] : [yi for yi in U_N[u][k] if yi[l] < y[l]] - for k in 1:p + k == l ? [y] : [yi for yi in u_n[k] if yi[l] < y[l]] for + k in 1:p ] if all(!isempty(N[k]) for k in 1:p if k != l && u_l[k] != yN[k]) bounds_to_add[u_l] = N @@ -56,7 +57,7 @@ function _update_search_region( else for k in 1:p if _is_less(y, u, k) - push!(U_N[u][k], y) + push!(u_n[k], y) end end end @@ -85,29 +86,37 @@ end function _get_child(u::Vector{Float64}, y::Vector{Float64}, k::Int) @assert length(u) == length(y) - return vcat(u[1:(k-1)], y[k], u[(k+1):length(y)]) + child = copy(u) + child[k] = y[k] + return child end +# This is function h(u, k) in the paper. +function _h(k::Int, u::Vector{Float64}, yI::Vector{Float64}) + h = 1.0 + for (i, (u_i, yI_i)) in enumerate(zip(u, yI)) + if i != k + h *= u_i - yI_i + end + end + return h +end + +# This is Problem (6) in the paper. The upper bound M is our nadir point yN. 
function _select_search_zone( U_N::Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}, yI::Vector{Float64}, yN::Vector{Float64}, ) - upper_bounds = collect(keys(U_N)) - p = length(yI) - k_star, u_star, v_star = 1, first(upper_bounds), 0.0 - for k in 1:p - for u in upper_bounds - if u[k] != yN[k] - v = 1.0 - for i in 1:p - if i != k - v *= u[i] - yI[i] - end - end - if v > v_star - k_star, u_star, v_star = k, u, v - end + if isempty(U_N) + return 1, yN + end + k_star, u_star, v_star = 1, yN, -Inf + for k in 1:length(yI), u in keys(U_N) + if !isapprox(u[k], yN[k]; atol = 1e-6) + v = _h(k, u, yI) + if v > v_star + k_star, u_star, v_star = k, u, v end end end @@ -129,6 +138,11 @@ function minimize_multiobjective!( return status, SolutionPoint[SolutionPoint(X, Y) for (Y, X) in solutions] end +# To ensure the type stability of `.inner` and `.f`, we use a function barrier. +# +# We also pass in `solutions` to simplify the function and to ensure that we can +# return at any point from the this inner function with a partial list of primal +# solutions. function _minimize_multiobjective!( algorithm::TambyVanderpooten, model::Optimizer, @@ -141,6 +155,7 @@ function _minimize_multiobjective!( MOI.supports(inner, MOI.VariablePrimalStart(), MOI.VariableIndex) variables = MOI.get(inner, MOI.ListOfVariableIndices()) n = MOI.output_dimension(f) + # Compute the ideal (yI) and nadir (yN) points yI, yN = zeros(n), zeros(n) scalars = MOI.Utilities.scalarize(f) for (i, f_i) in enumerate(scalars) @@ -151,10 +166,9 @@ function _minimize_multiobjective!( if !_is_scalar_status_optimal(status) return status end - _, Y = _compute_point(model, variables, f_i) + _, y_i = _compute_point(model, variables, f_i) _log_subproblem_solve(model, variables) - yI[i] = Y - model.ideal_point[i] = Y + model.ideal_point[i] = yI[i] = y_i MOI.set(inner, MOI.ObjectiveSense(), MOI.MAX_SENSE) optimize_inner!(model) status = MOI.get(inner, MOI.TerminationStatus()) @@ -162,106 +176,127 @@ function _minimize_multiobjective!( _warn_on_nonfinite_anti_ideal(algorithm, MOI.MIN_SENSE, i) return status end - _, Y = _compute_point(model, variables, f_i) + _, y_i = _compute_point(model, variables, f_i) _log_subproblem_solve(model, variables) - yN[i] = Y + 1 + # This is not really the nadir point, it's an upper bound on the + # nadir point because we later add epsilon constraints with a -1. + yN[i] = y_i + 1 end MOI.set(inner, MOI.ObjectiveSense(), MOI.MIN_SENSE) - U_N = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}() + # Instead of adding and deleting constraints during the algorithm, we add + # them all here at the beginning. 
+ ε_constraints = [ + MOI.Utilities.normalize_and_add_constraint( + inner, + f_i, + MOI.LessThan(yN[i]), + ) for (i, f_i) in enumerate(scalars) + ] + obj_constants = MOI.constant.(scalars) + # U_N: + # keys: upper bound vectors + # values: a vector with n elements, U_N[u][k] is a vector of y + U_N = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}( + # The nadir point, except for the ideal point in position k + yN => [[_get_child(yN, yI, k)] for k in 1:n], + ) V = [Tuple{Vector{Float64},Vector{Float64}}[] for k in 1:n] - U_N[yN] = [[_get_child(yN, yI, k)] for k in 1:n] status = MOI.OPTIMAL + sum_f = sum(1.0 * s for s in scalars) while !isempty(U_N) if (ret = _check_premature_termination(model)) !== nothing status = ret break end k, u = _select_search_zone(U_N, yI, yN) + # Solve problem Π¹(k, u) MOI.set(inner, MOI.ObjectiveFunction{typeof(scalars[k])}(), scalars[k]) - ε_constraints = Any[] - for (i, f_i) in enumerate(scalars) - if i != k - ci = MOI.Utilities.normalize_and_add_constraint( - inner, - f_i, - MOI.LessThan{Float64}(u[i] - 1), - ) - push!(ε_constraints, ci) - end + # Update the constraints y_i < u_i. Note that this is a strict + # equality. We use an ε of 1.0. This is also why we use yN+1 when + # computing the nadir point. + for i in 1:n + u_i = ifelse(i == k, yN[i], u[i]) + set = MOI.LessThan(u_i - 1.0 - obj_constants[i]) + MOI.set(inner, MOI.ConstraintSet(), ε_constraints[i], set) end - if u[k] ≠ yN[k] - if warm_start_supported - variables_start = solutions[first(U_N[u][k])] - for x_i in variables - MOI.set( - inner, - MOI.VariablePrimalStart(), - x_i, - variables_start[x_i], - ) - end + # The isapprox is another way of saying does U_N[u][k] exist + if warm_start_supported && !isapprox(u[k], yN[k]; atol = 1e-6) + for (x_i, start_i) in solutions[last(U_N[u][k])] + MOI.set(inner, MOI.VariablePrimalStart(), x_i, start_i) end end optimize_inner!(model) - _log_subproblem_solve(model, "auxillary subproblem") + # We don't log this first-stage subproblem. status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) - MOI.delete.(model, ε_constraints) - return status + break end + # Now solve problem Π²(k, u) y_k = MOI.get(inner, MOI.ObjectiveValue())::Float64 - sum_f = sum(1.0 * s for s in scalars) + set = MOI.LessThan(y_k - obj_constants[k]) + MOI.set(inner, MOI.ConstraintSet(), ε_constraints[k], set) MOI.set(inner, MOI.ObjectiveFunction{typeof(sum_f)}(), sum_f) - y_k_constraint = MOI.Utilities.normalize_and_add_constraint( - inner, - scalars[k], - MOI.EqualTo(y_k), - ) optimize_inner!(model) status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) - MOI.delete.(model, ε_constraints) - MOI.delete(model, y_k_constraint) - return status + break end X, Y = _compute_point(model, variables, f) _log_subproblem_solve(model, Y) - MOI.delete.(model, ε_constraints) - MOI.delete(model, y_k_constraint) push!(V[k], (u, Y)) - # We want `if !(Y in U_N[u][k])` but this tests exact equality. We want - # an approximate comparison. + # We want `if !(Y in U_N[u][k])` but this tests exact equality. We + # want an approximate comparison. 
if all(!isapprox(Y; atol = 1e-6), U_N[u][k]) _update_search_region(U_N, Y, yN) solutions[Y] = X end - bounds_to_remove = Vector{Float64}[] - for u_i in keys(U_N) - for k in 1:n - if isapprox(u_i[k], yI[k]; atol = 1e-6) - push!(bounds_to_remove, u_i) - else - for (u_j, y_j) in V[k] - if isapprox(y_j[k], u_i[k]; atol = 1e-6) && - _is_less_eq(u_i, u_j, k) - push!(bounds_to_remove, u_i) - end - end - end - end + _clean_search_region(U_N, yI, V, k) + end + MOI.delete.(model, ε_constraints) + return status +end + +# This function is lines 10-17 of the paper. We re-order things a bit. The outer +# loop over u′ is the same, but we break the inner `foreach k` loop up into +# separate loops so that we don't need to loop over all `V` if one of the u′ has +# reached the ideal point. +# +# TODO: this loop is a good candidate for parallelisation. +# +# TODO: we could probably also be cleverer here, and just do a partial update +# based on the most recent changes to V. Do we need to keep re-checking +# everything? +function _clean_search_region(U_N, yI, V, k) + for u′ in keys(U_N) + if _clean_search_region_inner(u′, U_N, yI, V, k) + delete!(U_N, u′) end - if !isempty(bounds_to_remove) - for bound_to_remove in bounds_to_remove - delete!(U_N, bound_to_remove) + end + return +end + +function _clean_search_region_inner(u′, U_N, yI, V, k) + for (u′_k, yI_k) in zip(u′, yI) + if isapprox(u′_k, yI_k; atol = 1e-6) + return true + end + end + for (k, V_k) in enumerate(V) + for (u, y_k) in V_k + if _comparison_line_16(u′, u, y_k, k) + return true end end end - return status + return false end -function _is_less_eq(y, u, k) - for i in 1:length(y) - if i != k && !(y[i] <= u[i]) +function _comparison_line_16(u′, u, y_k, k) + if !≈(y_k[k], u′[k]; atol = 1e-6) + return false + end + for (i, (u′_i, u_i)) in enumerate(zip(u′, u)) + if i != k && !(u′_i <= u_i) return false end end From ba4fdf4167db23a91541af20c145c1bb48ce2da4 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Thu, 18 Dec 2025 15:24:01 +1300 Subject: [PATCH 4/6] Update --- src/algorithms/TambyVanderpooten.jl | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index 3b2952a..47016ac 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -193,16 +193,13 @@ function _minimize_multiobjective!( ) for (i, f_i) in enumerate(scalars) ] obj_constants = MOI.constant.(scalars) - # U_N: - # keys: upper bound vectors - # values: a vector with n elements, U_N[u][k] is a vector of y U_N = Dict{Vector{Float64},Vector{Vector{Vector{Float64}}}}( # The nadir point, except for the ideal point in position k yN => [[_get_child(yN, yI, k)] for k in 1:n], ) V = [Tuple{Vector{Float64},Vector{Float64}}[] for k in 1:n] status = MOI.OPTIMAL - sum_f = sum(1.0 * s for s in scalars) + sum_f = MOI.Utilities.operate(+, Float64, scalars...) 
while !isempty(U_N) if (ret = _check_premature_termination(model)) !== nothing status = ret @@ -219,14 +216,12 @@ function _minimize_multiobjective!( set = MOI.LessThan(u_i - 1.0 - obj_constants[i]) MOI.set(inner, MOI.ConstraintSet(), ε_constraints[i], set) end - # The isapprox is another way of saying does U_N[u][k] exist - if warm_start_supported && !isapprox(u[k], yN[k]; atol = 1e-6) + if warm_start_supported && !isempty(solutions) for (x_i, start_i) in solutions[last(U_N[u][k])] MOI.set(inner, MOI.VariablePrimalStart(), x_i, start_i) end end - optimize_inner!(model) - # We don't log this first-stage subproblem. + optimize_inner!(model) # We don't log this first-stage subproblem. status = MOI.get(inner, MOI.TerminationStatus()) if !_is_scalar_status_optimal(status) break @@ -260,15 +255,14 @@ end # loop over u′ is the same, but we break the inner `foreach k` loop up into # separate loops so that we don't need to loop over all `V` if one of the u′ has # reached the ideal point. -# -# TODO: this loop is a good candidate for parallelisation. -# -# TODO: we could probably also be cleverer here, and just do a partial update -# based on the most recent changes to V. Do we need to keep re-checking -# everything? function _clean_search_region(U_N, yI, V, k) - for u′ in keys(U_N) - if _clean_search_region_inner(u′, U_N, yI, V, k) + map_keys = collect(enumerate(keys(U_N))) + to_delete = fill(false, length(map_keys)) + Threads.@threads for (i, u′) in map_keys + to_delete[i] = _clean_search_region_inner(u′, U_N, yI, V, k) + end + for (i, u′) in map_keys + if to_delete[i] delete!(U_N, u′) end end From d39f82ebf6b059d44de87bbce0c75177fe4d1a1c Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Thu, 18 Dec 2025 16:31:33 +1300 Subject: [PATCH 5/6] Update --- src/algorithms/TambyVanderpooten.jl | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index 47016ac..ca81669 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -108,9 +108,6 @@ function _select_search_zone( yI::Vector{Float64}, yN::Vector{Float64}, ) - if isempty(U_N) - return 1, yN - end k_star, u_star, v_star = 1, yN, -Inf for k in 1:length(yI), u in keys(U_N) if !isapprox(u[k], yN[k]; atol = 1e-6) From 75acbaf3f4b86b7f09bd689bfce17e076ed30dfd Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Thu, 18 Dec 2025 16:34:10 +1300 Subject: [PATCH 6/6] Update --- src/algorithms/TambyVanderpooten.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/algorithms/TambyVanderpooten.jl b/src/algorithms/TambyVanderpooten.jl index ca81669..d0436d5 100644 --- a/src/algorithms/TambyVanderpooten.jl +++ b/src/algorithms/TambyVanderpooten.jl @@ -257,7 +257,7 @@ function _clean_search_region(U_N, yI, V, k) to_delete = fill(false, length(map_keys)) Threads.@threads for (i, u′) in map_keys to_delete[i] = _clean_search_region_inner(u′, U_N, yI, V, k) - end + end # COV_EXCL_LINE for (i, u′) in map_keys if to_delete[i] delete!(U_N, u′)
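PATCH 3 also changes how the ε-constraints are handled: rather than adding `f_i <= u[i] - 1` constraints and deleting them again in every iteration of the main loop, one constraint per objective is created up front (initially with bound `yN[i]`) and only its `MOI.LessThan` set is updated each iteration, with the objective constants subtracted manually because the set is written directly instead of being re-normalized. A small self-contained sketch of that update pattern, using a `MOI.Utilities.Model` cache in place of the real solver and made-up data:

    import MathOptInterface as MOI

    model = MOI.Utilities.Model{Float64}()
    x = MOI.add_variable(model)
    f = MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(2.0, x)], 1.0)  # 2x + 1
    # `normalize_and_add_constraint` moves the function constant into the set,
    # so `2x + 1 <= 5` is stored as `2x <= 4`.
    ci = MOI.Utilities.normalize_and_add_constraint(model, f, MOI.LessThan(5.0))
    @assert MOI.get(model, MOI.ConstraintSet(), ci) == MOI.LessThan(4.0)
    # Tighten the bound in place instead of deleting and re-adding the
    # constraint. Because the set is written directly, the function constant
    # (here 1.0) must be subtracted by hand, as the patch does with
    # `obj_constants`.
    MOI.set(model, MOI.ConstraintSet(), ci, MOI.LessThan(3.0 - 1.0))
    @assert MOI.get(model, MOI.ConstraintSet(), ci) == MOI.LessThan(2.0)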
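PATCH 4 parallelises `_clean_search_region` with `Threads.@threads`. The structure matters more than the macro: the predicate is evaluated in parallel over a snapshot of the keys, with results written into a preallocated `Bool` vector, and the `Dict` is only mutated afterwards on a single thread, because deleting from a `Dict` concurrently is not thread-safe. A standalone sketch of that filter-then-delete pattern; `parallel_filter_delete!` and `should_delete` are illustrative names, not functions from the package:

    # Evaluate the predicate in parallel, then delete serially.
    function parallel_filter_delete!(should_delete::Function, dict::Dict)
        snapshot = collect(keys(dict))
        to_delete = fill(false, length(snapshot))
        Threads.@threads for i in 1:length(snapshot)
            to_delete[i] = should_delete(snapshot[i])
        end
        for (i, key) in enumerate(snapshot)
            if to_delete[i]
                delete!(dict, key)
            end
        end
        return dict
    end

    d = Dict(k => k^2 for k in 1:10)
    parallel_filter_delete!(isodd, d)  # drop the odd keys
    @assert sort!(collect(keys(d))) == [2, 4, 6, 8, 10]

The same patch also swaps `sum(1.0 * s for s in scalars)` for `MOI.Utilities.operate(+, Float64, scalars...)`, building the summed objective with MOI's own n-ary addition rather than `Base.sum`'s repeated pairwise `+`.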