diff --git a/src/cache.jl b/src/cache.jl
index 375d5aa..7d52932 100644
--- a/src/cache.jl
+++ b/src/cache.jl
@@ -62,7 +62,7 @@ function OptimizationCache(prob::SciMLBase.OptimizationProblem, opt;
         g = SciMLBase.requiresgradient(opt), h = SciMLBase.requireshessian(opt),
         hv = SciMLBase.requireshessian(opt), fg = SciMLBase.allowsfg(opt),
         fgh = SciMLBase.allowsfgh(opt), cons_j = SciMLBase.requiresconsjac(opt), cons_h = SciMLBase.requiresconshess(opt),
-        cons_vjp = SciMLBase.allowsconsjvp(opt), cons_jvp = SciMLBase.allowsconsjvp(opt), lag_h = SciMLBase.requireslagh(opt))
+        cons_vjp = SciMLBase.allowsconsjvp(opt), cons_jvp = SciMLBase.allowsconsjvp(opt), lag_h = SciMLBase.requireslagh(opt), sense = prob.sense)
 
     if structural_analysis
         obj_res, cons_res = symify_cache(f, prob, num_cons, manifold)
diff --git a/src/function.jl b/src/function.jl
index f05e035..822a358 100644
--- a/src/function.jl
+++ b/src/function.jl
@@ -113,7 +113,7 @@ end
 
 function OptimizationBase.instantiate_function(
         f::OptimizationFunction{true}, x, ::SciMLBase.NoAD,
-        p, num_cons = 0; kwargs...)
+        p, num_cons = 0; sense, kwargs...)
     if f.grad === nothing
         grad = nothing
     else
@@ -205,7 +205,8 @@ function OptimizationBase.instantiate_function(
     expr = symbolify(f.expr)
     cons_expr = symbolify.(f.cons_expr)
 
-    return OptimizationFunction{true}(f.f, SciMLBase.NoAD();
+    obj_f = (x, p) -> sense == MaxSense ? -1.0 * f.f(x, p) : f.f(x, p)
+    return OptimizationFunction{true}(obj_f, SciMLBase.NoAD();
         grad = grad, fg = fg, hess = hess, fgh = fgh, hv = hv,
         cons = cons, cons_j = cons_j, cons_h = cons_h,
         cons_vjp = cons_vjp, cons_jvp = cons_jvp,
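
For reviewers, a minimal sketch of the behavior this patch introduces. The objective and initial point below are illustrative, not from this PR; it assumes SciMLBase.MaxSense and calls the patched NoAD method of instantiate_function directly:

    using SciMLBase, OptimizationBase

    # Hypothetical objective: maximize f(x) = -(x - 2)^2, whose maximum is at x = 2.
    f = OptimizationFunction{true}((x, p) -> -(x[1] - 2)^2, SciMLBase.NoAD())

    # OptimizationCache now forwards prob.sense into this call via the new
    # `sense` keyword.
    newf = OptimizationBase.instantiate_function(
        f, [0.0], SciMLBase.NoAD(), nothing, 0; sense = SciMLBase.MaxSense)

    # Solvers minimize by convention, so with MaxSense the returned objective
    # is the negation of the user's: f([0.0]) == -4.0, so the wrapper gives 4.0.
    newf.f([0.0], nothing)  # == 4.0

Note that `sense` is declared without a default in the new signature, so any code calling this NoAD method directly must now pass it explicitly; within the package, OptimizationCache supplies sense = prob.sense.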