Skip to content

Commit

Permalink
Replace "AF_ConVar" with "AF_UEI".
Browse files Browse the repository at this point in the history
  • Loading branch information
Jiangyan-Zhao committed Nov 21, 2023
1 parent 6294bb4 commit 360e396
Show file tree
Hide file tree
Showing 15 changed files with 455 additions and 129 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
Package: EPBO
Title: Bayesian Optimization via Exact Penalty
Version: 0.0.9
Date: 2023-11-03
Version: 0.0.11
Date: 2023-11-20
Authors@R:
person(given = "Jiangyan",
family = "Zhao",
Expand Down
2 changes: 2 additions & 0 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ export(AF_ScaledEI_Kernel)
export(AF_ScaledEI_MC)
export(AF_TS)
export(AF_UEI)
export(AF_UEI_Kernel)
export(AF_UEI_MC)
export(AF_VEI)
export(bilog)
export(normalize)
Expand Down
5 changes: 2 additions & 3 deletions R/AF_ConVar.R
Original file line number Diff line number Diff line change
Expand Up @@ -47,14 +47,13 @@ AF_ConVar = function(x, fgpi, Cgpi, equal)
mu_C = pred_C$mean
sigma_C = sqrt(pred_C$s2)
if(equal[j]){
LCB[,j] = abs(mu_C) - 3*sigma_C
LCB[,j] = abs(mu_C) - 6*sigma_C
}else{
LCB[,j] = mu_C - 3*sigma_C
LCB[,j] = mu_C - 6*sigma_C
}
}

## Acquaisition function
AF = sigma_f
infeasible = apply(LCB > 0, 1, any)
if(all(infeasible)){
AF = sigma_f
Expand Down
10 changes: 4 additions & 6 deletions R/AF_LCB.R
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@
#'
#' @param Cgpi description
#'
#' @param epbest description
#'
#' @param rho description
#'
#' @param equal an optional vector containing zeros and ones, whose length equals the number of
Expand All @@ -37,7 +35,7 @@
#'
#'

AF_LCB = function(x, fgpi, fmean, fsd, Cgpi, Cmean, Csd, epbest, rho, equal)
AF_LCB = function(x, fgpi, fmean, fsd, Cgpi, rho, equal)
{
if(is.null(nrow(x))) x = matrix(x, nrow=1)
ncand = nrow(x) # number of the candidate points
Expand All @@ -52,15 +50,15 @@ AF_LCB = function(x, fgpi, fmean, fsd, Cgpi, Cmean, Csd, epbest, rho, equal)
mu_C = sigma_C = omega = matrix(NA, nc, ncand)
for (j in 1:nc) {
pred_C = predGPsep(Cgpi[j], x, lite=TRUE)
mu_C[j,] = pred_C$mean * Csd[j] + Cmean[j]
sigma_C[j,] = sqrt(pred_C$s2) * Csd[j]
mu_C[j,] = pred_C$mean
sigma_C[j,] = sqrt(pred_C$s2)
omega[j,] = (equal[j]+1)*pnorm(mu_C[j,]/sigma_C[j,]) - equal[j]
}

## Acquaisition function
mu_ep = mu_f + rho%*%(omega*mu_C)
sigma_ep = sqrt(sigma_f^2 + (rho^2)%*%((omega*sigma_C)^2))
LCB = mu_ep - sigma_ep
LCB = mu_ep - 3*sigma_ep
return(LCB)
}

11 changes: 6 additions & 5 deletions R/AF_UEI.R
Original file line number Diff line number Diff line change
Expand Up @@ -36,15 +36,15 @@
#'
#'

AF_UEI = function(x, fgpi, Cgpi, epbest, rho, equal)
AF_UEI = function(x, fgpi, fmean, fsd, Cgpi, epbest, rho, equal, beta=3)
{
if(is.null(nrow(x))) x = matrix(x, nrow=1)
ncand = nrow(x) # number of the candidate points

## objective
pred_f = predGPsep(fgpi, x, lite=TRUE)
mu_f = pred_f$mean
sigma_f = sqrt(pred_f$s2)
mu_f = pred_f$mean * fsd + fmean
sigma_f = sqrt(pred_f$s2) * fsd

## constraints
nc = length(Cgpi) # number of the constraint
Expand All @@ -62,7 +62,8 @@ AF_UEI = function(x, fgpi, Cgpi, epbest, rho, equal)
d = (epbest - mu_ep)/sigma_ep
EI = sigma_ep * (d*pnorm(d) + dnorm(d)) # expected improvement
VI = sigma_ep^2 * ((d^2+1)*pnorm(d) + d*dnorm(d)) - EI^2 # variance of the improvement (remove sigma_ep)
VI = pmax(0, VI)
UEI = EI + 2*sqrt(VI) # Scaled expected improvement
VI = pmax(.Machine$double.xmin, VI)
UEI = EI + beta*sqrt(VI)
EI[is.nan(UEI)] = 0
return(UEI)
}
69 changes: 69 additions & 0 deletions R/AF_UEI_Kernel.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
#' @title UEI acquisition function
#'
#' @description Uncertainty-aware expected improvement: the expected improvement plus \code{beta} times the standard deviation of the improvement.
#'
#' @param x description
#'
#' @param fgpi description
#'
#' @param fmean mean used to de-normalize the objective prediction
#'
#' @param fsd standard deviation used to de-normalize the objective prediction
#'
#' @param Cgpi description
#'
#' @param epbest description
#'
#' @param rho description
#'
#' @param beta multiplier on the standard deviation of the improvement (default \code{3})
#'
#' @param type kriging type passed to \code{predict} (default \code{"UK"})
#'
#' @param equal an optional vector containing zeros and ones, whose length equals the number of
#' constraints, specifying which should be treated as equality constraints (\code{1}) and
#' which as inequality (\code{0})
#'
#'
#' @returns AF
#'
#' @author Jiangyan Zhao \email{[email protected]}
#'
#' @references Noe, U. and D. Husmeier (2018). On a new improvement-based acquisition function
#' for Bayesian optimization. \emph{arXiv:1808.06918}.
#'
#' @import laGP
#' @importFrom stats dnorm
#' @importFrom stats pnorm
#'
#'
#' @export
#'
#' @examples
#' B = rbind(c(0, 1), c(0, 1))
#'
#'

AF_UEI_Kernel = function(x, fgpi, fmean, fsd, Cgpi, epbest, rho, equal, beta=3, type="UK")
{
  ## Promote a single candidate (bare vector) to a one-row matrix.
  if (is.null(nrow(x))) x = matrix(x, nrow = 1)
  n_cand = nrow(x) # number of candidate points

  ## Objective surrogate: predict on the normalized scale, then map back to
  ## the original scale with the stored mean/sd.
  obj_pred = predict(object = fgpi, newdata = x, type = type,
                     checkNames = FALSE, light.return = TRUE)
  obj_mu = obj_pred$mean * fsd + fmean
  obj_sd = obj_pred$sd * fsd

  ## Constraint surrogates: one kriging model per constraint; w holds the
  ## expected-sign weight, (equal+1)*pnorm(mu/sd) - equal, per constraint.
  n_con = length(Cgpi)
  con_mu = con_sd = w = matrix(NA, n_con, n_cand)
  for (k in 1:n_con) {
    con_pred = predict(object = Cgpi[[k]], newdata = x, type = type,
                       checkNames = FALSE, light.return = TRUE)
    con_mu[k, ] = con_pred$mean
    con_sd[k, ] = con_pred$sd
    w[k, ] = (equal[k] + 1) * pnorm(con_mu[k, ] / con_sd[k, ]) - equal[k]
  }

  ## Moments of the exact-penalty surrogate (objective + weighted penalties).
  ep_mu = obj_mu + rho %*% (w * con_mu)
  ep_sd = sqrt(obj_sd^2 + (rho^2) %*% ((w * con_sd)^2))

  ## Closed-form moments of the improvement I = max(0, epbest - EP):
  ## ei is E[I]; vi is Var[I], floored at the smallest positive double so
  ## that sqrt() stays finite.
  z = (epbest - ep_mu) / ep_sd
  ei = ep_sd * (z * pnorm(z) + dnorm(z))
  vi = ep_sd^2 * ((z^2 + 1) * pnorm(z) + z * dnorm(z)) - ei^2
  vi = pmax(.Machine$double.xmin, vi)

  ## UEI = EI + beta * sd(I); NaNs (zero predictive sd) count as no improvement.
  uei = ei + beta * sqrt(vi)
  uei[is.nan(uei)] = 0
  return(uei)
}
83 changes: 83 additions & 0 deletions R/AF_UEI_MC.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
#' @title UEI acquisition function
#'
#' @description Scaled expected improvement
#'
#' @param x description
#'
#' @param fgpi description
#'
#' @param fmean description
#'
#' @param fsd description
#'
#' @param Cgpi description
#'
#' @param epbest description
#'
#' @param rho description
#'
#' @param equal an optional vector containing zeros and ones, whose length equals the number of
#' constraints, specifying which should be treated as equality constraints (\code{1}) and
#' which as inequality (\code{0})
#'
#' @param N description
#'
#' @param beta description
#'
#' @returns AF
#'
#' @author Jiangyan Zhao \email{[email protected]}
#'
#' @references Noe, U. and D. Husmeier (2018). On a new improvement-based acquisition function
#' for Bayesian optimization. \emph{arXiv:1808.06918}.
#'
#' @import laGP
#' @importFrom stats dnorm
#' @importFrom stats pnorm
#' @importFrom stats rnorm
#'
#'
#' @export
#'
#' @examples
#' B = rbind(c(0, 1), c(0, 1))
#'
#'

AF_UEI_MC = function(x, fgpi, fmean, fsd, Cgpi, epbest, rho, equal, N=1000, beta=3)
{
  ## Promote a single candidate (bare vector) to a one-row matrix.
  if (is.null(nrow(x))) x = matrix(x, nrow = 1)
  ncand = nrow(x) # number of candidate points

  ## Objective surrogate: predict on the normalized scale, then map back to
  ## the original scale with the stored mean/sd.
  pred_f = predGPsep(fgpi, x, lite = TRUE)
  mu_f = pred_f$mean * fsd + fmean
  sigma_f = sqrt(pred_f$s2) * fsd

  ## Constraint surrogates: one GP per constraint.
  nc = length(Cgpi) # number of constraints
  mu_C = sigma_C = matrix(NA, nc, ncand)
  for (j in 1:nc) {
    pred_C = predGPsep(Cgpi[j], x, lite = TRUE)
    mu_C[j,] = pred_C$mean
    sigma_C[j,] = sqrt(pred_C$s2)
  }

  ## Monte Carlo draws of the exact penalty EP = f + sum_j rho_j * penalty_j,
  ## where penalty_j is |c_j| for equality constraints and max(0, c_j) for
  ## inequality constraints.
  EP = matrix(NA, nrow = N, ncol = ncand)
  for (n in 1:N) {
    EP[n,] = rnorm(ncand, mu_f, sigma_f)
    for (j in 1:nc) {
      if (equal[j]) {
        EP[n,] = EP[n,] + rho[j]*abs(rnorm(ncand, mu_C[j,], sigma_C[j,]))
      } else {
        EP[n,] = EP[n,] + rho[j]*pmax(0, rnorm(ncand, mu_C[j,], sigma_C[j,]))
      }
    }
  }

  ## Sample moments of the improvement I = max(0, epbest - EP).
  improvement = matrix(pmax(0, epbest - EP), nrow = N)
  EI = colMeans(improvement) # expected improvement
  ## stats::sd per column replaces matrixStats::colSds, which was neither
  ## imported in the roxygen header nor in the NAMESPACE.
  SI = apply(improvement, 2, stats::sd) # standard deviation of the improvement
  UEI = EI + beta * SI
  ## Fix: the original zeroed EI here but returned UEI, so NaNs (e.g. from a
  ## zero predictive sd) leaked to the caller; mirror AF_UEI_Kernel instead.
  UEI[is.nan(UEI)] = 0
  return(UEI)
}
Loading

0 comments on commit 360e396

Please sign in to comment.