See also the nnetsauce documentation at <https://techtonique.github.io/nnetsauce/> for further details.

DeepMTS(
  obj,
  n_layers = 3L,
  n_hidden_features = 5L,
  activation_name = "relu",
  a = 0.01,
  nodes_sim = "sobol",
  bias = TRUE,
  dropout = 0,
  direct_link = TRUE,
  n_clusters = 2L,
  cluster_encode = TRUE,
  type_clust = "kmeans",
  lags = 1L,
  replications = NULL,
  kernel = NULL,
  agg = "mean",
  seed = 123L,
  backend = c("cpu", "gpu", "tpu"),
  verbose = 0,
  ...
)

Arguments

obj

a base model object, e.g. a scikit-learn regressor such as `sklearn$linear_model$ElasticNet()` (see the Examples section below)

n_layers

number of hidden layers in the deep model (default: 3L)

...

additional arguments passed on to nnetsauce::CustomRegressor

Examples


set.seed(123)
X <- matrix(rnorm(300), 100, 3)

obj <- sklearn$linear_model$ElasticNet()
obj2 <- DeepMTS(obj)

obj2$fit(X)
#> DeepMTS(dropout=0.0,
#>         obj=CustomRegressor(dropout=0.0,
#>                             obj=CustomRegressor(dropout=0.0, obj=ElasticNet())),
#>         verbose=0.0)
obj2$predict()
#>      series0    series1    series2
#> 1 0.09698047 -0.1014573 0.09947172
#> 2 0.09698047 -0.1014573 0.09947172
#> 3 0.09698047 -0.1014573 0.09947172
#> 4 0.09698047 -0.1014573 0.09947172
#> 5 0.09698047 -0.1014573 0.09947172