diff --git a/.github/workflows/r-cmd-check.yml b/.github/workflows/r-cmd-check.yml index 790348563..8ee1c320d 100644 --- a/.github/workflows/r-cmd-check.yml +++ b/.github/workflows/r-cmd-check.yml @@ -82,7 +82,7 @@ jobs: if: runner.os == 'Windows' id: get_package_version_windows run: | - $version = Rscript -e 'cat(as.character(packageVersion("torchvision")))' + $version = Rscript -e 'cat(as.character(packageVersion("torchvision")))' echo "TORCHVISION_PACKAGE_VERSION=$version" >> $env:GITHUB_ENV - name: Get torch cache path (Linux/macOS) diff --git a/.gitignore b/.gitignore index 0f6767c2a..3b9b1ba6c 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,6 @@ mlr3torch*.tgz *~ docs inst/doc -*.html **/.DS_Store /doc/ /Meta/ @@ -17,4 +16,11 @@ CRAN-SUBMISSION paper/data .idea/ .vsc/ -paper/data \ No newline at end of file +paper/data/ +paper/benchmark/registry +.vscode/ +paper/benchmark/registry-linux-cpu/ +paper/benchmark/registry-macos/ +paper/benchmark/registry-linux-gpu/ +paper/benchmark/registry-linux-gpu-optimizer/ +paper/benchmark/registry-linux-gpu-old/ diff --git a/R/learner_torch_methods.R b/R/learner_torch_methods.R index 19d9b7881..be414ef0b 100644 --- a/R/learner_torch_methods.R +++ b/R/learner_torch_methods.R @@ -27,7 +27,8 @@ learner_torch_train = function(self, private, super, task, param_vals) { stopf("Training Dataloader of Learner '%s' has length 0", self$id) } - network = private$.network(task, param_vals)$to(device = param_vals$device) + network = private$.network(task, param_vals) + network$to(device = param_vals$device) if (isTRUE(param_vals$jit_trace) && !inherits(network, "script_module")) { example = get_example_batch(loader_train)$x example = lapply(example, function(x) x$to(device = param_vals$device)) @@ -134,6 +135,8 @@ train_loop = function(ctx, cbs) { ctx$network$train() + forward = get_forward(ctx$network) + # if we increment epoch at the end of the loop it has the wrong value # during the final two callback stages ctx$epoch = 0L @@ -145,6 +148,7 @@ train_loop = function(ctx, cbs) { indices = list() train_iterator = dataloader_make_iter(ctx$loader_train) ctx$step = 0L + eval_train = eval_train_in_epoch(ctx) while (ctx$step < length(ctx$loader_train)) { ctx$step = ctx$step + 1 ctx$batch = dataloader_next(train_iterator) @@ -155,9 +159,9 @@ train_loop = function(ctx, cbs) { call("on_batch_begin") if (length(ctx$batch$x) == 1L) { - ctx$y_hat = ctx$network(ctx$batch$x[[1L]]) + ctx$y_hat = forward(ctx$batch$x[[1L]]) } else { - ctx$y_hat = do.call(ctx$network, ctx$batch$x) + ctx$y_hat = do.call(forward, ctx$batch$x) } loss = ctx$loss_fn(ctx$y_hat, ctx$batch$y) @@ -167,14 +171,16 @@ train_loop = function(ctx, cbs) { call("on_after_backward") ctx$last_loss = loss$item() - predictions[[length(predictions) + 1]] = ctx$y_hat$detach() - indices[[length(indices) + 1]] = as.integer(ctx$batch$.index$to(device = "cpu")) + if (eval_train) { + predictions[[length(predictions) + 1]] = ctx$y_hat$detach() + indices[[length(indices) + 1]] = as.integer(ctx$batch$.index$to(device = "cpu")) + } ctx$optimizer$step() call("on_batch_end") } - ctx$last_scores_train = if (eval_train_in_epoch(ctx)) { + ctx$last_scores_train = if (eval_train) { measure_prediction( pred_tensor = torch_cat(predictions, dim = 1L), measures = ctx$measures_train, diff --git a/R/nn.R b/R/nn.R index e8efe02c6..1e97e6b15 100644 --- a/R/nn.R +++ b/R/nn.R @@ -11,9 +11,5 @@ #' # is the same as: #' po2 = nn("linear") nn = function(.key, ...) { - args = list(...) 
- if (is.null(args$id)) { - args$id = .key - } - invoke(po, .obj = paste0("nn_", .key), .args = args) + invoke(po, .obj = paste0("nn_", .key), id = .key, ...) } diff --git a/R/utils.R b/R/utils.R index f318013f7..a7bde0e32 100644 --- a/R/utils.R +++ b/R/utils.R @@ -275,6 +275,23 @@ order_named_args = function(f, l) { l2 } +get_forward = function(net) { + if (inherits(net, "script_module")) { + is_training = net$is_training + trainforward = net$trainforward + evalforward = net$evalforward + function(...) { + if (is_training()) { + trainforward(...) + } else { + evalforward(...) + } + } + } else { + net$forward + } +} + #' @title Network Output Dimension #' @description @@ -314,7 +331,7 @@ all_or_none_ = function(...) { single_lazy_tensor = function(task) { identical(task$feature_types[, "type"][[1L]], "lazy_tensor") } - + n_num_features = function(task) { sum(task$feature_types$type %in% c("numeric", "integer")) } @@ -325,4 +342,4 @@ n_categ_features = function(task) { n_ltnsr_features = function(task) { sum(task$feature_types$type == "lazy_tensor") -} \ No newline at end of file +} diff --git a/man-roxygen/paramset_torchlearner.R b/man-roxygen/paramset_torchlearner.R index 46e62e5cb..1256d72f8 100644 --- a/man-roxygen/paramset_torchlearner.R +++ b/man-roxygen/paramset_torchlearner.R @@ -64,7 +64,7 @@ #' The batch size (required). #' * `shuffle` :: `logical(1)`\cr #' Whether to shuffle the instances in the dataset. This is initialized to `TRUE`, -#' which differs from the default (`FALSE`). +#' which differs from the default of the [`torch::dataloader`] which is `FALSE`. #' * `sampler` :: [`torch::sampler`]\cr #' Object that defines how the dataloader draw samples. #' * `batch_sampler` :: [`torch::sampler`]\cr @@ -91,4 +91,4 @@ #' * `worker_packages` :: `character()`\cr #' Which packages to load on the workers. #' -#' Also see `torch::dataloder` for more information. +#' Also see [`torch::dataloder`] for more information. diff --git a/mlr3torch-benchmark-5274609.out b/mlr3torch-benchmark-5274609.out new file mode 100644 index 000000000..cbe869271 --- /dev/null +++ b/mlr3torch-benchmark-5274609.out @@ -0,0 +1,75 @@ +[INFO] Extracting squashfs filesystem... +Parallel unsquashfs: Using 92 processors +57832 inodes (209999 blocks) to write + + +created 55975 files +created 6137 directories +created 1735 symlinks +created 0 devices +created 0 fifos +created 0 sockets + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. + +WARNING: The NVIDIA Driver was not detected. GPU functionality will not be available. + Use the NVIDIA Container Toolkit to start this container with GPU support; see + https://docs.nvidia.com/datacenter/cloud-native/ . 
+ +R version 4.5.0 (2025-04-11) +Platform: x86_64-pc-linux-gnu +Running under: Ubuntu 22.04.4 LTS + +Matrix products: default +BLAS: /usr/local/lib/R/lib/libRblas.so +LAPACK: /usr/local/lib/R/lib/libRlapack.so; LAPACK version 3.12.1 + +locale: + [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C + [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 + [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 + [7] LC_PAPER=en_US.UTF-8 LC_NAME=C + [9] LC_ADDRESS=C LC_TELEPHONE=C +[11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C + +time zone: Etc/UTC +tzcode source: system (glibc) + +attached base packages: +[1] stats graphics grDevices utils datasets methods base + +loaded via a namespace (and not attached): +[1] compiler_4.5.0 + +Attaching package: ‘mlr3misc’ + +The following object is masked from ‘package:batchtools’: + + chunk + +Sourcing configuration file '/mnt/data/mlr3torch/paper/batchtools.conf.R' ... +Loading required package: checkmate +Created registry in '/mnt/data/mlr3torch/paper/benchmark/registry' using cluster functions 'Interactive' +Exporting new objects: 'time_rtorch' ... +Adding problem 'runtime_train' +Adding algorithm 'pytorch' +Adding algorithm 'rtorch' +Adding algorithm 'mlr3torch' +Adding 180 experiments ('runtime_train'[30] x 'rtorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'mlr3torch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'pytorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'rtorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'mlr3torch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'pytorch'[2] x repls[3]) ... diff --git a/mlr3torch-benchmark-5274611.out b/mlr3torch-benchmark-5274611.out new file mode 100644 index 000000000..476212e80 --- /dev/null +++ b/mlr3torch-benchmark-5274611.out @@ -0,0 +1,573 @@ +[INFO] Extracting squashfs filesystem... +Parallel unsquashfs: Using 92 processors +57832 inodes (209999 blocks) to write + + +created 55975 files +created 6137 directories +created 1735 symlinks +created 0 devices +created 0 fifos +created 0 sockets + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. + +WARNING: The NVIDIA Driver was not detected. GPU functionality will not be available. + Use the NVIDIA Container Toolkit to start this container with GPU support; see + https://docs.nvidia.com/datacenter/cloud-native/ . + +Reading registry in read-write mode +Loading required package: checkmate +Sourcing configuration file '/mnt/data/mlr3torch/paper/batchtools.conf.R' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... 
+Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... diff --git a/mlr3torch-benchmark-5277868.out b/mlr3torch-benchmark-5277868.out new file mode 100644 index 000000000..8c502ff69 --- /dev/null +++ b/mlr3torch-benchmark-5277868.out @@ -0,0 +1,75 @@ +[INFO] Extracting squashfs filesystem... +Parallel unsquashfs: Using 18 processors +57832 inodes (209999 blocks) to write + + +created 55975 files +created 6137 directories +created 1735 symlinks +created 0 devices +created 0 fifos +created 0 sockets + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. + +WARNING: The NVIDIA Driver was not detected. GPU functionality will not be available. + Use the NVIDIA Container Toolkit to start this container with GPU support; see + https://docs.nvidia.com/datacenter/cloud-native/ . + +R version 4.5.0 (2025-04-11) +Platform: x86_64-pc-linux-gnu +Running under: Ubuntu 22.04.4 LTS + +Matrix products: default +BLAS: /usr/local/lib/R/lib/libRblas.so +LAPACK: /usr/local/lib/R/lib/libRlapack.so; LAPACK version 3.12.1 + +locale: + [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C + [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 + [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 + [7] LC_PAPER=en_US.UTF-8 LC_NAME=C + [9] LC_ADDRESS=C LC_TELEPHONE=C +[11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C + +time zone: Etc/UTC +tzcode source: system (glibc) + +attached base packages: +[1] stats graphics grDevices utils datasets methods base + +loaded via a namespace (and not attached): +[1] compiler_4.5.0 + +Attaching package: ‘mlr3misc’ + +The following object is masked from ‘package:batchtools’: + + chunk + +Sourcing configuration file '/mnt/data/mlr3torch/paper/batchtools.conf.R' ... +Loading required package: checkmate +Created registry in '/mnt/data/mlr3torch/paper/benchmark/registry' using cluster functions 'Interactive' +Exporting new objects: 'time_rtorch' ... +Adding problem 'runtime_train' +Adding algorithm 'pytorch' +Adding algorithm 'rtorch' +Adding algorithm 'mlr3torch' +Adding 180 experiments ('runtime_train'[30] x 'rtorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'mlr3torch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'pytorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'rtorch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'mlr3torch'[2] x repls[3]) ... +Adding 180 experiments ('runtime_train'[30] x 'pytorch'[2] x repls[3]) ... diff --git a/mlr3torch-benchmark-5277870.out b/mlr3torch-benchmark-5277870.out new file mode 100644 index 000000000..704ab0ec5 --- /dev/null +++ b/mlr3torch-benchmark-5277870.out @@ -0,0 +1,36 @@ +[INFO] Extracting squashfs filesystem... 
+Parallel unsquashfs: Using 18 processors +57832 inodes (209999 blocks) to write + + +created 55975 files +created 6137 directories +created 1735 symlinks +created 0 devices +created 0 fifos +created 0 sockets + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. + +WARNING: The NVIDIA Driver was not detected. GPU functionality will not be available. + Use the NVIDIA Container Toolkit to start this container with GPU support; see + https://docs.nvidia.com/datacenter/cloud-native/ . + +Reading registry in read-write mode +Loading required package: checkmate +Sourcing configuration file '/mnt/data/mlr3torch/paper/batchtools.conf.R' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... +Submitting 1 jobs in 1 chunks using cluster functions 'Interactive' ... diff --git a/mlr3torch-paper-5285461.out b/mlr3torch-paper-5285461.out new file mode 100644 index 000000000..105fcacd2 --- /dev/null +++ b/mlr3torch-paper-5285461.out @@ -0,0 +1,27 @@ +[ERROR] No such file or directory: /dss/dssmcmlfs01/pr74ze/pr74ze-dss-0001/ru48nas2/sebffischer+mlr3torch-jss+latest.sqsh + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. + + + +processing file: paper_code.Rmd +1/2 +2/2 [unnamed-chunk-1] +output file: paper_code.md + +[1] "paper_code.md" +Warning message: +In png(..., res = dpi, units = "in") : + unable to open connection to X11 display '' diff --git a/mlr3torch-paper-5294204.out b/mlr3torch-paper-5294204.out new file mode 100644 index 000000000..b29262af6 --- /dev/null +++ b/mlr3torch-paper-5294204.out @@ -0,0 +1,28 @@ +[ERROR] No such file or directory: /dss/dssmcmlfs01/pr74ze/pr74ze-dss-0001/ru48nas2/sebffischer+mlr3torch-jss+latest.sqsh + +========== +== CUDA == +========== + +CUDA Version 12.4.1 + +Container image Copyright (c) 2016-2023, NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +This container image and its contents are governed by the NVIDIA Deep Learning Container License. +By pulling and using the container, you accept the terms and conditions of this license: +https://developer.nvidia.com/ngc/nvidia-deep-learning-container-license + +A copy of this license is made available in this container at /NGC-DL-CONTAINER-LICENSE for your convenience. 
+ +bash: line 3: conda: command not found + + +processing file: paper_code.Rmd +1/2 +2/2 [unnamed-chunk-1] +output file: paper_code.md + +[1] "paper_code.md" +Warning message: +In png(..., res = dpi, units = "in") : + unable to open connection to X11 display '' diff --git a/paper/.Rprofile b/paper/.Rprofile new file mode 100644 index 000000000..6421479f6 --- /dev/null +++ b/paper/.Rprofile @@ -0,0 +1,18 @@ +# Setting HTTP User Agent to identify OS, such that P3M can detect compatibility +options(HTTPUserAgent = sprintf("R/%s R (%s)", getRversion(), paste(getRversion(), R.version["platform"], R.version["arch"], R.version["os"]))) + +# Ensure curl is used for downloading packages +options(download.file.method = "curl") + +# Enable verbose output for curl and again set HHTP user agent +options(download.file.extra = paste( + # Follow redirects, show errors, and display the HTTP status and URL + '-fsSL -w "%{stderr}curl: HTTP %{http_code} %{url_effective}\n"', + # Configure the R user agent header to install Linux binary packages + sprintf('--header "User-Agent: R (%s)"', paste(getRversion(), R.version["platform"], R.version["arch"], R.version["os"])) +)) + +# for ubuntu: +options( + repos = c(CRAN = "https://packagemanager.posit.co/cran/__linux__/jammy/latest") +) diff --git a/paper/README.md b/paper/README.md new file mode 100644 index 000000000..453c54a82 --- /dev/null +++ b/paper/README.md @@ -0,0 +1,118 @@ +# Reproducing the Results + +## Computational Environment + +To recreate the computational environment, you can download the docker image +`sebffischer/mlr3torch-jss:latest` from dockerhub. + +```bash +enroot import docker://index.docker.io#sebffischer/mlr3torch-jss:latest +``` + +Next, you can create a docker container from the image: + +For GPU: + +```bash +enroot create --name mlr3torch-jss sebffischer+mlr3torch-jss+gpu.sqsh +``` + +For CPU: + +```bash +enroot create --name mlr3torch-jss sebffischer+mlr3torch-jss+cpu.sqsh +``` + + +To start the container, run: + +```bash +enroot start \ + --mount < parent-dir-to-mlr3torch>:/mnt/data \ + mlr3torch-jss bash +``` + +## Running the Benchmark + +Note that while the benchmark uses `batchtools` for experiment management, we don't use it for job submission in order to ensure that all GPU and CPU benchmarks respectively are run on the same machine. + +### Running locally + +To run the benchmarks locally, go into `./paper` (to have the right `.Rprofile`). + +To initialize the benchmark experiment, run: + +```bash +Rscript benchmark/benchmark.R +``` + +To start the CPU experiments, run: +Note that it's important to have enough RAM, otherwise the benchmarks will be incomparable. + +```bash +Rscript benchmark/run_cpu.R +``` + +To start the GPU experiments, run: + +```bash +Rscript benchmark/run_gpu.R +``` + + +### Running on the cluster + +Exemplary slurm scripts are provided via `benchmark_init.sh`, `benchmark_cpu.sh`, and `benchmark_gpu.sh`. +These need to be adapted to the specific cluster and job submission system. 
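+As a rough orientation, such a script might look like the sketch below. This is a hypothetical example, not one of the provided scripts: the `#SBATCH` values (job name, walltime, GPU request) and the `bash -c` invocation are placeholders and must be adapted to your cluster; the `enroot start` call mirrors the command shown above.
+
+```bash
+#!/bin/bash
+#SBATCH --job-name=mlr3torch-benchmark
+#SBATCH --time=48:00:00
+#SBATCH --gres=gpu:1   # drop this line for the CPU-only runs
+
+# Start the enroot container created above, mounting the directory that
+# contains the mlr3torch checkout, and run the GPU benchmark script.
+enroot start \
+  --mount <parent-dir-to-mlr3torch>:/mnt/data \
+  mlr3torch-jss \
+  bash -c "cd /mnt/data/mlr3torch/paper && Rscript benchmark/run_gpu.R"
+```
+
+The CPU variant would call `benchmark/run_cpu.R` instead, and the initialization script would call `benchmark/benchmark.R`.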
+
+### Running a Subset of the Jobs
+
+To run a subset of the jobs, you can adjust the runner scripts to do something along the lines of:
+
+```r
+reg = loadRegistry("~/mlr3torch/paper/benchmark/registry", writeable = TRUE)
+tbl = unwrap(getJobTable(reg))
+ids = tbl[device == "cpu" & n_layers == 10 & latent == 250 & jit & optimizer == "adamw" & repl == 1, ]$job.id
+for (id in sample(ids)) {
+  submitJobs(id)
+  Sys.sleep(0.1)
+}
+```
+
+### Collecting the Results
+
+Once the benchmark experiments are finished, you can collect the results by running:
+
+```bash
+Rscript benchmark/summarize.R
+```
+
+This will create the `benchmark/results.rds` file.
+
+
+### Generating the Plots
+
+Simply run:
+
+```bash
+Rscript paper/plot_benchmark.R
+```
+
+
+## Running the Paper Code
+
+Inside the container, run the following code from the `./paper` directory.
+This requires access to an NVIDIA GPU.
+
+```r
+knitr::knit('paper_code.Rmd')
+```
+
+We also provide a version of the paper code that runs on CPU only.
+There, we set the number of epochs to 0 everywhere and the device to `"cpu"`.
+
+```r
+knitr::knit('paper_code_cheap.Rmd')
+```
diff --git a/paper/batchtools.conf.R b/paper/batchtools.conf.R
new file mode 100644
index 000000000..c1667d6e3
--- /dev/null
+++ b/paper/batchtools.conf.R
@@ -0,0 +1 @@
+cluster.functions = batchtools::makeClusterFunctionsInteractive()
diff --git a/paper/benchmark/.python-version b/paper/benchmark/.python-version
new file mode 100644
index 000000000..86f8c02eb
--- /dev/null
+++ b/paper/benchmark/.python-version
@@ -0,0 +1 @@
+3.13.5
diff --git a/paper/benchmark/attic/benchmark2_cpu.R b/paper/benchmark/attic/benchmark2_cpu.R
new file mode 100644
index 000000000..a5e013db3
--- /dev/null
+++ b/paper/benchmark/attic/benchmark2_cpu.R
@@ -0,0 +1,178 @@
+library(batchtools)
+library(mlr3misc)
+
+reg = makeExperimentRegistry(
+  file.dir = here::here("paper", "benchmark", "registry_cpu"),
+  packages = "checkmate"
+)
+reg$cluster.functions = makeClusterFunctionsInteractive()
+
+source(here::here("paper/benchmark/time_rtorch.R"))
+
+batchExport(list(
+  time_rtorch = time_rtorch
+))
+
+addProblem(
+  "runtime_train",
+  data = NULL,
+  fun = function(
+    epochs,
+    batch_size,
+    n_layers,
+    latent,
+    n,
+    p,
+    optimizer,
+    device,
+    ...
+  ) {
+    problem = list(
+      epochs = assert_int(epochs),
+      batch_size = assert_int(batch_size),
+      n_layers = assert_int(n_layers),
+      latent = assert_int(latent),
+      n = assert_int(n),
+      p = assert_int(p),
+      optimizer = assert_choice(
+        optimizer,
+        c("ignite_adamw", "adamw", "sgd", "ignite_sgd")
+      ),
+      device = assert_choice(device, c("cuda", "cpu"))
+    )
+
+    problem
+  }
+)
+
+addAlgorithm("pytorch", fun = function(instance, job, data, jit, ...) {
+  f = function(...) {
+    library(reticulate)
+    x = try(
+      {
+        reticulate::use_python("/usr/bin/python3", required = TRUE)
+        reticulate::source_python(here::here("paper/benchmark/time_pytorch.py"))
+        print(reticulate::py_config())
+        time_pytorch(...)
+      },
+      silent = TRUE
+    )
+    print(x)
+  }
+  args = c(instance, list(seed = job$seed, jit = jit))
+  #do.call(f, args)
+  callr::r(f, args = args)
+})
+
+addAlgorithm("rtorch", fun = function(instance, job, opt_type, jit, ...) {
+  assert_choice(opt_type, c("standard", "ignite"))
+  if (opt_type == "ignite") {
+    instance$optimizer = paste0("ignite_", instance$optimizer)
+  }
+  callr::r(time_rtorch, args = c(instance, list(seed = job$seed, jit = jit)))
+})
+
+addAlgorithm("mlr3torch", fun = function(instance, job, opt_type, jit, ...)
{ + if (opt_type == "ignite") { + instance$optimizer = paste0("ignite_", instance$optimizer) + } + callr::r( + time_rtorch, + args = c(instance, list(seed = job$seed, mlr3torch = TRUE, jit = jit)) + ) +}) + +# global config: +REPLS = 4L +EPOCHS = 20L +N = 2000L +P = 1000L + +# cuda experiments: + +problem_design = expand.grid( + list( + n = N, + p = P, + epochs = EPOCHS, + latent = c(1000, 2500, 5000), + optimizer = c("sgd", "adamw"), + batch_size = 32L, + device = "cuda", + n_layers = c(2L, 4L, 6L, 8L, 10L, 12L, 14L, 16L) + ), + stringsAsFactors = FALSE +) + + +addExperiments( + prob.designs = list( + runtime_train = problem_design + ), + algo.designs = list( + rtorch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cuda_exp" + ), + mlr3torch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cuda_exp" + ), + pytorch = data.frame( + jit = c(FALSE, TRUE), + tag = "cuda_exp" + ) + ), + repls = REPLS +) + +# cpu experiments: +# (need smaller networks, otherwise too expensive with the cuda config) + +problem_design = expand.grid( + list( + n = N, + p = P, + epochs = EPOCHS, + # factor 10 smaller than cuda + latent = c(100, 250, 500), + optimizer = c("sgd", "adamw"), + batch_size = 32L, + device = "cpu", + n_layers = c(2L, 4L, 6L, 8L, 10L, 12L, 14L, 16L) + ), + stringsAsFactors = FALSE +) + +addExperiments( + prob.designs = list( + runtime_train = problem_design + ), + algo.designs = list( + rtorch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cpu_exp" + ), + mlr3torch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cpu_exp" + ), + pytorch = data.frame( + jit = c(FALSE, TRUE), + tag = "cpu_exp" + ) + ), + repls = REPLS +) + + +tbl = unwrap(getJobTable()) +tbl = tbl[device == "cpu", ] +for (id in sample(tbl$job.id)) { + print(id) + submitJobs(id) +} \ No newline at end of file diff --git a/paper/benchmark/attic/benchmark_cpu.R b/paper/benchmark/attic/benchmark_cpu.R new file mode 100644 index 000000000..70b8360e9 --- /dev/null +++ b/paper/benchmark/attic/benchmark_cpu.R @@ -0,0 +1,166 @@ +library(batchtools) +library(mlr3misc) + +reg = makeExperimentRegistry( + file.dir = here::here("paper", "benchmark", "registry_cpu"), + packages = "checkmate" +) +reg$cluster.functions = makeClusterFunctionsInteractive() + +source(here::here("paper/benchmark/time_rtorch.R")) + +batchExport(list( + time_rtorch = time_rtorch +)) + +# The algorithm should return the total runtime needed for training, the SD, but also the performance of the training losses so we know it is all working +addProblem("runtime_train", + data = NULL, + fun = function(epochs, batch_size, n_layers, latent, n, p, optimizer, device, ...) { + problem = list( + epochs = assert_int(epochs), + batch_size = assert_int(batch_size), + n_layers = assert_int(n_layers), + latent = assert_int(latent), + n = assert_int(n), + p = assert_int(p), + optimizer = assert_choice(optimizer, c("ignite_adamw", "adamw", "sgd", "ignite_sgd")), + device = assert_choice(device, c("cuda", "cpu")) + ) + + problem + } +) + +# pytorch needs to be submitted with an active pytorch environment +addAlgorithm("pytorch", + fun = function(instance, job, data, jit, ...) { + f = function(...) { + library(reticulate) + x = try({ + reticulate::use_python("/usr/bin/python3", required = TRUE) + reticulate::source_python(here::here("paper/benchmark/time_pytorch.py")) + print(reticulate::py_config()) + time_pytorch(...) 
+ }, silent = TRUE) + print(x) + + } + args = c(instance, list(seed = job$seed, jit = jit)) + #do.call(f, args) + callr::r(f, args = args) + } +) + +addAlgorithm("rtorch", + fun = function(instance, job, opt_type, jit,...) { + assert_choice(opt_type, c("standard", "ignite")) + if (opt_type == "ignite") { + instance$optimizer = paste0("ignite_", instance$optimizer) + } + #do.call(time_rtorch, args = c(instance, list(seed = job$seed, jit = jit))) + callr::r(time_rtorch, args = c(instance, list(seed = job$seed, jit = jit))) + } +) + +addAlgorithm("mlr3torch", + fun = function(instance, job, opt_type, jit, ...) { + if (opt_type == "ignite") { + instance$optimizer = paste0("ignite_", instance$optimizer) + } + do.call(time_rtorch, args = c(instance, list(seed = job$seed, mlr3torch = TRUE, jit = jit))) + #callr::r(time_rtorch, args = c(instance, list(seed = job$seed, mlr3torch = TRUE, jit = jit))) + } +) + +# global config: +REPLS = 4L +EPOCHS = 20L +N = 2000L +P = 1000L + +# cuda experiments: + + +problem_design = expand.grid(list( + n = N, + p = P, + epochs = EPOCHS, + latent = c(1000, 2500, 5000), + optimizer = c("sgd", "adamw"), + batch_size = 32L, + device = "cuda", + n_layers = c(2L, 4L, 6L, 8L, 10L, 12L, 14L, 16L) +), stringsAsFactors = FALSE) + + +addExperiments( + prob.designs = list( + runtime_train = problem_design + ), + algo.designs = list( + rtorch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cuda_exp" + ), + mlr3torch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cuda_exp" + ), + pytorch = data.frame( + jit = c(FALSE, TRUE), + tag = "cuda_exp" + ) + ), + repls = REPLS +) + +# cpu experiments: +# (need smaller networks, otherwise too expensive with the cuda config) + +problem_design = expand.grid(list( + n = N, + p = P, + epochs = EPOCHS, + # factor 10 smaller than cuda + latent = c(50, 100, 200), + optimizer = c("sgd", "adamw"), + batch_size = 32L, + device = "cpu", + n_layers = c(2L, 4L, 6L, 8L, 10L, 12L, 14L, 16L) +), stringsAsFactors = FALSE) + +addExperiments( + prob.designs = list( + runtime_train = problem_design + ), + algo.designs = list( + rtorch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cpu_exp" + ), + mlr3torch = data.frame( + jit = c(FALSE, TRUE), + opt_type = c("ignite"), + tag = "cpu_exp" + ), + pytorch = data.frame( + jit = c(FALSE, TRUE), + tag = "cpu_exp" + ) + ), + repls = REPLS +) + +ids = sample(findJobs()[[1L]]) +tbl = unwrap(getJobTable()) +ids = tbl[device == "cpu" & n_layers == 10 & latent == 200 & jit & optimizer == "adamw" & repl == 1, ]$job.id +# there is a bug in batchtools that sorts the IDs +# when submitting them together +for (id in sample(ids)) { + submitJobs(id) + Sys.sleep(0.1) +} diff --git a/paper/benchmark/attic/mlr3torch.html b/paper/benchmark/attic/mlr3torch.html new file mode 100644 index 000000000..50133f75d --- /dev/null +++ b/paper/benchmark/attic/mlr3torch.html @@ -0,0 +1,4637 @@ + + +