[R] RandomForest tuning the parameters

varin sacha (sending from yahoo.fr)
Sat May 13 18:10:46 CEST 2023


Dear Eric, thanks for your code.

Dear R-experts,

Below is my R code; I still get the same error message at the end when trying to tune the parameters!
Any hints to make it work would be appreciated.

Best,

########################################################################
# Create features and target

x1=c(12,14,34,23,25,45,65,76,38,49,61,73,28,39,90,56,43,23,56,78,45,65,34,78,90,81,23,21,25,34,65,45,78,72,27,54,69,97,65,32,56,78,21,15,41,60,90,70,56,30,10,9,22,39,90,50,20,10,30,40)

x2=c(98,99,100,120,165,154,189,122,111,100,199,187,178,163,192,202,210,120,132,143,145,210,156,111,178,183,158,199,122,134,158,145,156,173,198,123,143,162,178,123,159,145,198,190,180,120,100,130,200,111,100,123,211,134,123,156,176,198,199,200)

y=c(0.4,0.6,0.67,0.45,1.4,2.4,1.9,2.3,4.5,3.7,3,5,6.2,1.67,3.2,2.5,3.5,2,5.4,3,3,2,6,-2,-4.5,-2.4,-7.4,5,3.4,-2.4,5,1,1.3,-2.1,2,3.4,4,-2.4,1,-3.5,3,2,1,-4,-3,-2,3,6,4,10,2,3,4,3,1.3,5.4,3,2,1.5,3)

X <- data.frame(x1, x2)

#libraries
library(lattice)
library(ggplot2)
library(caret)
library(randomForest)
 
# Split data into training and test sets
index <- createDataPartition(y, p=0.75, list=FALSE)
X_train <- X[ index, ]
X_test <- X[-index, ]
y_train <- y[index]
y_test <- y[-index]
# Train the model 
regr <- randomForest(x = X_train, y = y_train , maxnodes = 10, ntree = 500)
 
#Tuning the parameters
# Note: the 75% training split here has far fewer than 500 rows, so indexing
# X_train[1:500, ] pads the data with NA rows and makes randomForest()/train()
# fail. Cap N at the number of available training rows instead.
N <- min(500, nrow(X_train))
X_train_ <- X_train[1:N, ]
y_train_ <- y_train[1:N]

seed <- 7
metric <- 'RMSE'


# Define a custom caret model so that maxnodes and ntree can be tuned
customRF <- list(type = "Regression", library = "randomForest", loop = NULL)

customRF$parameters <- data.frame(parameter = c("maxnodes", "ntree"),
                                  class = rep("numeric", 2),
                                  label = c("maxnodes", "ntree"))

customRF$grid <- function(x, y, len = NULL, search = "grid") {}

customRF$fit <- function(x, y, wts, param, lev, last, weights, classProbs, ...) {
  randomForest(x, y, maxnodes = param$maxnodes, ntree = param$ntree, ...)
}

customRF$predict <- function(modelFit, newdata, preProc = NULL, submodels = NULL)
  predict(modelFit, newdata)

customRF$prob <- function(modelFit, newdata, preProc = NULL, submodels = NULL)
  predict(modelFit, newdata, type = "prob")

customRF$sort <- function(x) x[order(x[, 1]), ]

customRF$levels <- function(x) x$classes

 
# Set grid search parameters
control <- trainControl(method="repeatedcv", number=10, repeats=3, search='grid')
 
# Outline the grid of parameters
tunegrid <- expand.grid(.maxnodes=c(10,20,30,50), .ntree=c(100, 200, 300))
set.seed(seed)
 
# Train the model
rf_gridsearch <- train(x=X_train_, y=y_train_, method=customRF, metric=metric, tuneGrid=tunegrid, trControl=control)
 
plot(rf_gridsearch)

rf_gridsearch$bestTune
########################################################################
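
Once train() runs without the error, the tuned model could be checked on the held-out split with the same metrics as in my earlier post below; a minimal sketch (assuming rf_gridsearch, X_test and y_test are still in the workspace):

library(Metrics)
pred_test <- predict(rf_gridsearch, X_test)   # dispatches to customRF$predict
print(paste0('MAE: ', mae(y_test, pred_test)))
print(paste0('RMSE: ', caret::postResample(pred_test, y_test)['RMSE']))
print(paste0('R2: ', caret::postResample(pred_test, y_test)['Rsquared']))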
 
On Tuesday, May 9, 2023 at 09:44:53 UTC+2, Eric Berger <ericjberger using gmail.com> wrote:

Hi Sacha,
On second thought, perhaps this is more the direction that you want ...

X2 = cbind(X_train,y_train)
colnames(X2)[3] = "y"
regr2<-randomForest(y~x1+x2, data=X2,maxnodes=10, ntree=10)
regr
regr2

#Make prediction
predictions= predict(regr, X_test)
predictions2= predict(regr2, X_test)
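
# To compare the two fits on the test split, caret::postResample could be
# used, e.g. (a sketch, assuming y_test is still defined):
caret::postResample(predictions, y_test)    # fit via the x/y interface
caret::postResample(predictions2, y_test)   # fit via the formula interface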

HTH,
Eric


On Tue, May 9, 2023 at 6:40 AM Eric Berger <ericjberger using gmail.com> wrote:
> Hi,
> One problem you have is with the command:
> regr<-randomForest(y~x1+x2, data=X_train, proximity=TRUE)
> 
> What you need is something like this:
> 
> X2 <- cbind(X,y)
> regr<-randomForest(y~x1+x2, data=X2, proximity=TRUE)
> 
> HTH,
> Eric
> 
> 
> On Mon, May 8, 2023 at 11:11 PM varin sacha via R-help
> <r-help using r-project.org> wrote:
>>
>> Dear R-experts,
>>
>> Below is a toy example that produces some error messages, especially at the end of the code (tuning the parameters). Your help correcting my R code would be highly appreciated.
>>
>>
>> #######################################
>> #libraries
>> library(lattice)
>> library(ggplot2)
>> library(caret)
>> library(randomForest)
>>
>> #Data
>> y=c(23,34,32,12,24,35,45,56,76,87,54,34,23,45,41,13,16,98,35,65,56,67,78,89,87,64,53,31,14,34,45,46,57,69,90,80,70,65,50,45,60,56,87,79,64,34,25,47,61,24,10,13,12,15,46,58,76,89,90,98)
>> x1=c(4,5,6,7,1,10,19,20,21,14,23,6,5,32,15,12,16,14,2,3,4,5,3,2,1,2,6,7,5,4,3,2,1,3,4,6,7,9,5,4,3,7,10,11,12,13,10,3,2,5,6,9,8,7,4,12,15,16,2,3)
>> x2=c(0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,1,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,0,0,1,1,1,1,0,1,0,1,0,0,1,1,0,0,1,0,0,1,1)
>>
>> y=as.numeric(y)
>> x1=as.numeric(x1)
>> x2=as.factor(x2)
>>
>> X=data.frame(x1,x2)
>> y=y
>>
>> #Split data into training and test sets
>> index=createDataPartition(y, p=0.75, list=FALSE)
>> X_train = X[index, ]
>> X_test = X[-index, ]
>> y_train= y[index ]
>> y_test = y[-index ]
>>
>> #Train the model
>> regr=randomForest (x=X_train, y=y_train, maxnodes=10, ntree=10)
>>
>> regr<-randomForest(y~x1+x2, data=X_train, proximity=TRUE)
>> regr
>>
>> #Make prediction
>> predictions= predict(regr, X_test)
>>
>> result= X_test
>> result['y'] = y_test
>> result['prediction'] = predictions
>> result
>>
>> # Import library for Metrics
>> library(Metrics)
>>
>> print(paste0('MAE: ' , mae(y_test,predictions) ))
>> print(paste0('MSE: ' ,caret::postResample(predictions , y_test)['RMSE']^2 ))
>> print(paste0('R2: ' ,caret::postResample(predictions , y_test)['Rsquared'] ))
>>
>>
>> #Tuning the parameters
>> N=500 #length(X_train)
>> X_train_ = X_train[1:N , ]
>> y_train_ = y_train[1:N]
>>
>> seed <-7
>> metric<-'RMSE'
>>
>> customRF <- list(type = "Regression", library = "randomForest", loop = NULL)
>>
>> customRF$parameters <- data.frame(parameter = c("maxnodes", "ntree"), class = rep("numeric", 2), label = c("maxnodes", "ntree"))
>>
>> customRF$grid <- function(x, y, len = NULL, search = "grid") {}
>>
>> customRF$fit <- function(x, y, wts, param, lev, last, weights, classProbs, ...) {
>>
>>  randomForest(x, y, maxnodes = param$maxnodes, ntree=param$ntree, ...)
>>
>> }
>>
>> customRF$predict <- function(modelFit, newdata, preProc = NULL, submodels = NULL)
>>
>> predict(modelFit, newdata)
>>
>> customRF$prob <- function(modelFit, newdata, preProc = NULL, submodels = NULL)
>>
>>   predict(modelFit, newdata, type = "prob")
>>
>> customRF$sort <- function(x) x[order(x[,1]),]
>>
>> customRF$levels <- function(x) x$classes
>>
>>
>> # Set grid search parameters
>> control <- trainControl(method="repeatedcv", number=10, repeats=3, search='grid')
>>
>> # Outline the grid of parameters
>> tunegrid <- expand.grid(.maxnodes=c(10,20,30,50), .ntree=c(100, 200, 300))
>> set.seed(seed)
>>
>> # Train the model
>> rf_gridsearch <- train(x=X_train_, y=y_train_, method=customRF, metric=metric, tuneGrid=tunegrid, trControl=control)
>>
>> plot(rf_gridsearch)
>>
>> rf_gridsearch$bestTune
>>
>> #################################################
>>
>> ______________________________________________
>> R-help using r-project.org mailing list -- To UNSUBSCRIBE and more, see
>> https://stat.ethz.ch/mailman/listinfo/r-help
>> PLEASE do read the posting guide http://www.R-project.org/posting-guide.html
>> and provide commented, minimal, self-contained, reproducible code.
> 


