Updated.
SinghRavin committed Mar 16, 2023
1 parent b12f482 commit 14378cf
Showing 8 changed files with 37 additions and 24 deletions.
3 changes: 2 additions & 1 deletion DESCRIPTION
@@ -21,7 +21,8 @@ Imports:
stats,
Rmpfr,
nnet,
pROC
pROC,
bench
Suggests:
knitr,
rmarkdown,
1 change: 1 addition & 0 deletions NAMESPACE
@@ -11,6 +11,7 @@ export(feed_forward)
export(map)
export(sigmoid)
importFrom(Rmpfr,pmax)
importFrom(bench,mark)
importFrom(nnet,nnet)
importFrom(pROC,roc)
importFrom(stats,predict)
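For context, the `importFrom(bench,mark)` line above is generated rather than hand-written: it comes from the roxygen tag added to R/BackPropNN.R later in this commit (assuming the package's NAMESPACE is managed by roxygen2, which the `#'` tags suggest). A minimal sketch of the regeneration step:

```r
# Assumed workflow (not part of the commit): rebuild NAMESPACE and the Rd files
# from the roxygen tags, picking up the new "#' @importFrom bench mark" entry.
devtools::document()   # or, equivalently, roxygen2::roxygenise()
```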
13 changes: 12 additions & 1 deletion R/BackPropNN.R
@@ -10,11 +10,22 @@ NULL
#' @importFrom nnet nnet
NULL

#' @importFrom bench mark
NULL

#' BackPropNN
#'
#' A collection of functions
#'
#' @description We add stuff up...
#' @description This R package implements neural-network (NN) training via
#' back-propagation from scratch (using basic R packages). The main function
#' takes data (X and Y) as input and asks the user to specify the number of
#' input nodes, hidden nodes, and output nodes, the learning rate, and the
#' activation function to use. It returns the fitted NN model as matrices
#' containing the weights for each node. The user can choose between the ReLU
#' and sigmoid activation functions, and can compare the performance of this
#' package against existing R NN packages in terms of accuracy and
#' computational time.
#'
#' @docType package
#' @name BackPropNN
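Tying the new bench import to the description above: `bench::mark()` is presumably what backs the claim about comparing computational time against existing NN packages. A minimal benchmarking sketch, assuming the `back_propagation_training()` signature shown in the @examples below; the simulated data, hidden-layer size, and iteration count are arbitrary choices for illustration:

```r
# Hypothetical benchmark (not part of the commit): compare training time of
# BackPropNN against nnet on the same simulated data.
library(BackPropNN)
set.seed(100)
data <- data.frame(X1 = 1:100, X2 = 2:101, Y = sample(c(0, 1), 100, replace = TRUE))
X <- as.matrix(data[, 1:2])
Y <- data$Y

bench::mark(
  BackPropNN = back_propagation_training(i = 2, h = 2, o = 1,
                                          learning_rate = 0.01,
                                          activation_func = "sigmoid",
                                          data = data),
  nnet       = nnet::nnet(X, Y, size = 2, trace = FALSE),
  check = FALSE,    # the two expressions return different kinds of objects
  iterations = 10
)
```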
20 changes: 8 additions & 12 deletions R/back_propagation_training.R
@@ -13,21 +13,17 @@
#' @return A list of class \code{BackPropNN_back_propagation_training}:
#' @examples
#' set.seed(100)
#' i <- 2 # number of input nodes
#' h <- 2 # number of hidden nodes
#' o <- 1 # number of output nodes
#' learning_rate <- 0.1 # The learning rate of the algorithm
#' activation_func <- "sigmoid" # the activation function
#' data <- data.frame(X1 = 1:100, X2 = 2:101, Y = sample(c(0,1), 100, replace=TRUE))
#' nn_model <- back_propagation_training(i, h, o, learning_rate, activation_func, data)
#' nn_model <- back_propagation_training(i=2, h=2, o=1, learning_rate=0.01,
#' activation_func="sigmoid", data=data)
#'
#' @export
back_propagation_training <- function(i, h, o, learning_rate, activation_func, data){

W_IH = matrix(0.01,h,i)
W_HO = matrix(0.01,o,h)
B_H = matrix(0.01,h,1)
B_O = matrix(0.01,o,1)
W_IH = matrix(0.01,nrow=h,ncol=i)
W_HO = matrix(0.01,nrow=o,ncol=h)
B_H = matrix(0.01,nrow=h,ncol=1)
B_O = matrix(0.01,nrow=o,ncol=1)

X = as.matrix(data[1:ncol(data)-1])
Y = as.matrix(data[,ncol(data)])
@@ -86,7 +82,7 @@ plot.BackPropNN_back_propagation_training <- function(x) {
Y = as.matrix(data[,ncol(data)])

nn_R <- nnet::nnet(X,Y,size=x$num_nodes[2], trace=FALSE)
nn_R_pred <- as.numeric(stats::predict(nn_R,X))
nn_R_pred <- as.numeric(stats::predict(nn_R,X, type="raw"))

pROC::roc(data[,ncol(data)],feed_forward(data,x)$pred,
plot=TRUE, print.auc=TRUE, main="ROC curve by BackPropNN")
@@ -114,7 +110,7 @@ print.BackPropNN_back_propagation_training <- function(x) {
Y = as.matrix(data[,ncol(data)])

nn_R <- nnet::nnet(X,Y,size=x$num_nodes[2], trace=FALSE)
nn_R_pred <- as.numeric(stats::predict(nn_R,X))
nn_R_pred <- as.numeric(stats::predict(nn_R,X, type="raw"))
nn_R_mse <- mean((Y - nn_R_pred)^2)

my_nn_mse <- mean((Y - feed_forward(x$input_data,x)$pred)^2)
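For orientation, a minimal usage sketch of the two S3 methods touched above, assuming they are registered for the BackPropNN_back_propagation_training class that back_propagation_training() is documented to return. The explicit `type = "raw"` in both methods asks predict.nnet() for the numeric network outputs (rather than class labels), which is what the ROC and MSE comparisons need.

```r
# Hypothetical usage (not part of the commit): train a model, then invoke the
# plot and print methods, which compare BackPropNN against an nnet::nnet() fit.
library(BackPropNN)
set.seed(100)
data <- data.frame(X1 = 1:100, X2 = 2:101, Y = sample(c(0, 1), 100, replace = TRUE))
nn_model <- back_propagation_training(i = 2, h = 2, o = 1, learning_rate = 0.01,
                                      activation_func = "sigmoid", data = data)

plot(nn_model)    # ROC curve(s) via pROC::roc; the method also fits an nnet baseline
print(nn_model)   # mean-squared-error comparison of the two fits
```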
Empty file removed main.R
10 changes: 9 additions & 1 deletion man/BackPropNN.Rd

Generated file; diff not rendered.

8 changes: 2 additions & 6 deletions man/back_propagation_training.Rd

Generated file; diff not rendered.

6 changes: 3 additions & 3 deletions vignettes/BackPropNN.Rmd
@@ -18,13 +18,13 @@ knitr::opts_chunk$set(
library(BackPropNN)
```

# Simulated data - OR data.
# Simulated data - AND data.

```{r}
num_obs <- 10000 # Number of observations
num_obs <- 100000 # Number of observations
X1 <- sample(c(0,1),num_obs, replace = TRUE)
X2 <- sample(c(0,1),num_obs, replace = TRUE)
Y <- ifelse(X1==0 & X2==0, 0, 1)
Y <- ifelse(X1==1 & X2==1, 1, 0)
data <- data.frame(X1,X2,Y)
```

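A quick sanity check (not in the vignette) of the new AND labelling: with independent 0/1 inputs, Y should equal 1 only when both X1 and X2 are 1, i.e. for roughly a quarter of the observations.

```r
# Hypothetical check (not part of the vignette): verify the AND labelling.
set.seed(1)
n  <- 1000
X1 <- sample(c(0, 1), n, replace = TRUE)
X2 <- sample(c(0, 1), n, replace = TRUE)
Y  <- ifelse(X1 == 1 & X2 == 1, 1, 0)
mean(Y)            # close to 0.25
all(Y == X1 * X2)  # TRUE: the AND of 0/1 inputs equals their product
```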
