A description of the parameters can be found at https://techtonique.github.io/nnetsauce/

MTS(
  obj,
  n_hidden_features = 5L,
  activation_name = "relu",
  a = 0.01,
  nodes_sim = "sobol",
  bias = TRUE,
  dropout = 0,
  direct_link = TRUE,
  n_clusters = 2L,
  cluster_encode = TRUE,
  type_clust = "kmeans",
  lags = 1L,
  replications = NULL,
  kernel = NULL,
  agg = "mean",
  seed = 123L,
  backend = c("cpu", "gpu", "tpu"),
  verbose = 0
)
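
Only obj (a regression model exposing fit() and predict(), such as a scikit-learn estimator) is required; the remaining arguments have the defaults shown above. A minimal sketch of a non-default configuration, using only arguments from the signature (the values below are illustrative choices, not recommendations):

mdl <- MTS(
  obj,                       # regressor exposing fit()/predict(), e.g. a scikit-learn estimator
  n_hidden_features = 10L,   # number of nodes in the hidden layer
  activation_name = "relu",  # activation function of the hidden layer
  dropout = 0.1,             # dropout regularization rate
  lags = 2L,                 # number of lags of each series
  backend = "cpu",           # one of "cpu", "gpu", "tpu"
  seed = 123L
)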

Examples


# Example 1 -----

library(nnetsauce)  # attach the package exporting MTS() and the sklearn handle

set.seed(123)
X <- matrix(rnorm(300), 100, 3)  # 100 observations of 3 series

obj <- sklearn$linear_model$ElasticNet()  # base learner
obj2 <- MTS(obj)                          # quasi-randomized network wrapper

obj2$fit(X)
#> MTS(dropout=0.0, kernel=None, obj=ElasticNet(), verbose=0.0)
obj2$predict()
#>      series0    series1    series2
#> 1 0.09698047 -0.1014573 0.09947172
#> 2 0.09698047 -0.1014573 0.09947172
#> 3 0.09698047 -0.1014573 0.09947172
#> 4 0.09698047 -0.1014573 0.09947172
#> 5 0.09698047 -0.1014573 0.09947172
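
# A sketch of a tuned configuration on the same data, using arguments from the
# signature above; the forecasting horizon argument h passed to predict() is an
# assumption, not shown in the output above.

obj3 <- MTS(
  sklearn$linear_model$ElasticNet(),
  n_hidden_features = 10L,  # larger hidden layer
  lags = 2L,                # use two lags of each series
  n_clusters = 3L           # add cluster-based covariates
)
obj3$fit(X)
obj3$predict(h = 10L)       # h (forecasting horizon) is assumed here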


# Example 2 -----

set.seed(123)
X <- matrix(rnorm(300), 100, 3)

obj <- sklearn$linear_model$BayesianRidge()  # base learner returning predictive std. dev.
obj2 <- MTS(obj)

obj2$fit(X)
#> MTS(dropout=0.0, kernel=None, obj=BayesianRidge(), verbose=0.0)
obj2$predict(return_std = TRUE)
#> DescribeResult(mean=            series0  series1  series2
#> date                                 
#> 2025-03-09     0.10    -0.05     0.10
#> 2025-03-10     0.10    -0.13     0.10
#> 2025-03-11     0.10    -0.13     0.10
#> 2025-03-12     0.10    -0.13     0.10
#> 2025-03-13     0.10    -0.13     0.10, lower=            series0  series1  series2
#> date                                 
#> 2025-03-09    -1.69    -1.94    -1.72
#> 2025-03-10    -1.69    -2.01    -1.72
#> 2025-03-11    -1.69    -2.01    -1.72
#> 2025-03-12    -1.69    -2.01    -1.72
#> 2025-03-13    -1.69    -2.01    -1.72, upper=            series0  series1  series2
#> date                                 
#> 2025-03-09     1.88     1.84     1.91
#> 2025-03-10     1.88     1.76     1.92
#> 2025-03-11     1.88     1.76     1.92
#> 2025-03-12     1.88     1.76     1.92
#> 2025-03-13     1.88     1.76     1.92)
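

# Sketch: simulation-based forecasts -----
# replications, kernel and agg (from the signature above) suggest simulation-based
# prediction intervals; the value kernel = "gaussian" is an assumption, not
# confirmed by the examples above.

obj4 <- MTS(
  sklearn$linear_model$ElasticNet(),
  lags = 1L,
  replications = 100L,   # number of simulated future paths
  kernel = "gaussian",   # assumed kernel name for simulating residuals
  agg = "mean",          # aggregate the replications with their mean
  seed = 123L
)
obj4$fit(X)
obj4$predict()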