See also https://techtonique.github.io/nnetsauce/

LazyClassifier(
  verbose = 0,
  ignore_warnings = TRUE,
  custom_metric = NULL,
  predictions = FALSE,
  random_state = 42L,
  estimators = "all",
  preprocess = FALSE,
  ...
)

Arguments

verbose

monitor progress (default is 0, which shows no progress; 1 shows progress)

ignore_warnings

print a trace when model fitting fails (default is TRUE)

custom_metric

defining a custom metric (default is NULL)

predictions

obtain predictions (default is FALSE)

random_state

reproducibility seed

estimators

specify the classifiers to be fitted (default is 'all')

preprocess

preprocess the input covariates (default is FALSE)

...

additional parameters to be passed to nnetsauce::CustomClassifier

Value

an object whose $fit method can be called to fit the models

Examples


library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X),
                                 size = floor(0.8*nrow(X)),
                                 replace = FALSE))
#>   [1]  14  50 118  43 150 148  90  91 143  92 137  99  72  26   7  78  81 147
#>  [19] 103 117  76  32 106 109 136   9  41  74  23  27  60  53 126 119 121  96
#>  [37]  38  89  34  93  69 138 130  63  13  82  97 142  25 114  21  79 124  47
#>  [55] 144 120  16   6 127  86 132  39  31 134 149 112   4 128 110 102  52  22
#>  [73] 129  87  35  40  30  12  88 123  64 146  67 122  37   8  51  10 115  42
#>  [91]  44  85 107 139  73  20  46  17  54 108  75  80  71  15  24  68 133 145
#> [109]  29 104  45 140 101 135  95 116   5 111  94  49
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- LazyClassifier()
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
#> [1] 0.9666667 0.9666667