1) We can incorporate the constraints within the objective function by returning a large number if any constraint is violated.
library(optimx)

# Objective with the inequality constraints folded in: if any constraint is
# violated, return a large penalty value (1e10) so the optimizer is pushed
# back toward the feasible region.
#
# z: numeric vector c(x, y); x and y are default-argument aliases for its
#    two components (same idiom as f2 below).
f <- function(z, x = z[1], y = z[2]) {
  if (2 * x^2 + 3 * y^2 <= 100 && x <= 3 && -x <= 0 && -y <= -3) {
    2 * x + 3 * y
  } else {
    1e10
  }
}

optimx(c(0, 3), f, method = c("Nelder", "CG", "L-BFGS-B", "spg", "nlm"))
##             p1 p2 value fevals gevals niter convcode  kkt1  kkt2 xtime
## Nelder-Mead  0  3     9    187     NA    NA        0 FALSE FALSE  0.00
## CG           0  3     9     41      1    NA        0 FALSE FALSE  0.00
## L-BFGS-B     0  3     9     21     21    NA       52 FALSE FALSE  0.00
## spg          0  3     9   1077     NA     1        0 FALSE FALSE  0.05
## nlm          0  3     9     NA     NA     1        0 FALSE FALSE  0.00
1a) This also works with optim, where Nelder-Mead is the default (or you could try constrOptim, which explicitly supports inequality constraints).
# Same penalized objective with optim(); Nelder-Mead is optim's default
# method, so no method argument is needed.
optim(c(0, 3), f)
## $par
## [1] 0 3
##
## $value
## [1] 9
##
## $counts
## function gradient
##      187       NA
##
## $convergence
## [1] 0
##
## $message
## NULL
2) Above we notice that the 2x^2 + 3y^2 &lt;= 100 constraint is not active, so we can drop it; in that case we can simply pass the remaining box constraints via lower and upper for those methods that support them.
# Without the (inactive) quadratic constraint the objective is plain 2x + 3y;
# the remaining box constraints go in via lower/upper.
f2 <- function(z, x = z[1], y = z[2]) 2 * x + 3 * y

# Only methods that support box constraints apply here. Note: "nlm" does NOT
# support bounds -- "nlminb" does, and is what the output below shows, so the
# method vector is corrected accordingly.
optimx(c(0, 3), f2, lower = c(0, 3), upper = c(3, Inf),
       method = c("L-BFGS-B", "spg", "nlminb"))
##          p1 p2 value fevals gevals niter convcode  kkt1 kkt2 xtime
## L-BFGS-B  0  3     9      1      1    NA        0 FALSE   NA  0.00
## spg       0  3     9      1     NA     0        0 FALSE   NA  0.01
## nlminb    0  3     9      1      2     1        0 FALSE   NA  0.00
## Warning message:
## In BB::spg(par = par, fn = ufn, gr = ugr, lower = lower, upper = upper,  :
##   convergence tolerance satisified at intial parameter values.