diff --git a/DESCRIPTION b/DESCRIPTION index d8bcde82..d4bc0f65 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -23,12 +23,14 @@ Imports: igraph, stats, ggrepel, BiocParallel, - utils + utils, + gsignal Suggests: BiocStyle, knitr, rmarkdown, testthat, - rgl + rgl, + microbenchmark Authors@R: c(person("Kim-Anh", "Le Cao", role = "aut", email = "kimanh.lecao@unimelb.edu.au"), person("Florian", "Rohart", role = "aut"), @@ -64,5 +66,5 @@ biocViews: ImmunoOncology, MultipleComparison, Classification, Regression -RoxygenNote: 7.2.3 +RoxygenNote: 7.3.2 Encoding: UTF-8 diff --git a/NAMESPACE b/NAMESPACE index 091e1d20..5c658326 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -108,6 +108,7 @@ S3method(summary,mixo_pls) S3method(summary,mixo_spls) S3method(summary,pca) S3method(summary,rcc) +export("%fp%") export(auroc) export(background.predict) export(block.pls) @@ -121,15 +122,21 @@ export(color.GreenRed) export(color.jet) export(color.mixo) export(color.spectral) +export(dctii_m_transforms) export(explained_variance) +export(facewise_product) +export(facewise_transpose) +export(ft) export(get.BER) export(get.confusion_matrix) export(imgCor) export(impute.nipals) export(ipca) export(logratio.transfo) +export(m_product) export(map) export(mat.rank) +export(matrix_to_m_transforms) export(mint.block.pls) export(mint.block.plsda) export(mint.block.spls) @@ -160,6 +167,9 @@ export(spca) export(spls) export(splsda) export(study_split) +export(tpca) +export(tpls) +export(tsvdm) export(tune) export(tune.block.splsda) export(tune.mint.splsda) diff --git a/R/mixOmics-package.R b/R/mixOmics-package.R index cb804483..9b2dbaab 100644 --- a/R/mixOmics-package.R +++ b/R/mixOmics-package.R @@ -28,7 +28,7 @@ #' Canonical Correlation Analysis and P-integration with variants of multi-group #' Partial Least Squares. #' -#' @docType package +#' @docType _PACKAGE #' @name mixOmics-package NULL diff --git a/R/tens.mproduct.R b/R/tens.mproduct.R new file mode 100644 index 00000000..85641bcb --- /dev/null +++ b/R/tens.mproduct.R @@ -0,0 +1,256 @@ +# ============================================================================== +# utilities and functions for Kilmer's m product +# ============================================================================== +# bltodo: use classes for m and minv? + +#' @description Apply a function across the last dimension of an input vector, +#' matrix, or tensor. This function defines both a parallel algorithm using +#' BiocParrallel, and also a simple \code{apply} algorithm. Even on Windows +#' Machines, setting \code{bpparam = BiocParallel::SerialParam()} offers a +#' notable speedup for larger 3D array inputs. +#' @param x Numerical array input. +#' @param mat Function which defines the tubal transform. +#' @param bpparam A \linkS4class{BiocParallelParam} object indicating the type +#' of parallelisation. +#' @return A tensor of the same size under the specified tubal transform, +#' denoted \hat{x}. 
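+#' @details As a quick sanity check, every tube of the returned array satisfies
+#' \code{result[i, j, ] == mat \%*\% x[i, j, ]}; for example, transforming
+#' \code{x <- array(rnorm(24), dim = c(2, 3, 4))} with \code{mat <- 2 * diag(4)}
+#' simply doubles every tube of \code{x}.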
+#' @author Brendan Lu
+#' @keywords internal
+.apply_mat_transform <- function(x, mat, bpparam) {
+  if (length(dim(x)) == 1) {
+    return(mat %*% x)
+  } else if (length(dim(x)) == 2) {
+    return(mat %*% x)
+  } else if (length(dim(x)) == 3) {
+    n <- dim(x)[1]
+    p <- dim(x)[2]
+    t <- dim(x)[3]
+    if (is.null(bpparam)) {
+      # apply algorithm --------------------------------------------------------
+      transformed_slices <- apply(
+        aperm(x, c(3, 1, 2)), c(2, 3), # permuted tensor with mode 3 in front
+        function(slice) mat %*% slice # left multiply by transform mat
+      )
+      # permute back to original orientation
+      return(aperm(array(transformed_slices, dim = c(t, n, p)), c(2, 3, 1)))
+      # ------------------------------------------------------------------------
+    } else {
+      # BiocParallel algorithm -------------------------------------------------
+      transformed_slices <- BiocParallel::bplapply(
+        lapply(seq_len(p), function(i) t(x[, i, ])), # a list of t x n matrices
+        function(slice) mat %*% slice, # left multiply by transform mat
+        BPPARAM = bpparam
+      )
+      # unlist and cast into array with p facewise t x n matrices
+      # then appropriately rotate to get original n x p x t matrix
+      return(
+        aperm(array(unlist(transformed_slices), dim = c(t, n, p)), c(2, 3, 1))
+      )
+      # ------------------------------------------------------------------------
+    }
+  } else {
+    # error: an array of order > 3 has been provided
+    stop(
+      "Only order 1 (vector), 2 (matrix), 3 (3D tensor) arrays are supported"
+    )
+  }
+}
+
+#' @description Validate appropriate 'null-ness' of m, minv inputs.
+#' @author Brendan Lu
+#' @keywords internal
+.stop_invalid_transform_input <- function(m, minv) {
+  if (
+    xor(is.function(m), is.function(minv)) ||
+    xor(is.null(m), is.null(minv))
+  ) {
+    stop(
+      "If explicitly defined, both m and its inverse must be defined as
+      functions"
+    )
+  }
+}
+
+#' @description Returns functions \code{m} and \code{minv} which apply tubal
+#' transforms defined by the matrix \code{m_mat}.
+#' @param m_mat Matrix which defines the tubal transform.
+#' @param m_inv_mat Matrix which defines the inverse tubal transform. If
+#' \code{NULL}, it is computed as \code{solve(m_mat)}.
+#' @param bpparam A \linkS4class{BiocParallelParam} object indicating the type
+#' of parallelisation.
+#' @return
+#' \item{m}{A function which applies the matrix m_mat along the last dimension
+#' of a given numerical input array. For 3D tensors it performs the mode-3
+#' product.}
+#' \item{minv}{The inverse of m.}
+#' @author Brendan Lu
+#' @export
+matrix_to_m_transforms <- function(
+  m_mat,
+  m_inv_mat = NULL,
+  bpparam = NULL
+) {
+  # error: non-matrix input
+  stopifnot(length(dim(m_mat)) == 2)
+  # error: non-square matrix input
+  stopifnot(dim(m_mat)[1] == dim(m_mat)[2])
+
+  # invert m_mat if m_inv_mat not specified
+  if (is.null(m_inv_mat)) {
+    # error: non-invertible input
+    m_inv_mat <- solve(m_mat)
+  } else {
+    # error: specified matrices are not the same size
+    stopifnot(identical(dim(m_mat), dim(m_inv_mat)))
+  }
+
+  return(list(
+    m = function(x) .apply_mat_transform(x, m_mat, bpparam = bpparam),
+    minv = function(x) .apply_mat_transform(x, m_inv_mat, bpparam = bpparam)
+  ))
+}
+
+#' @description Returns functions \code{m} and \code{minv} which apply tubal
+#' transforms defined by the Discrete Cosine Transform (DCT-II variant). This
+#' is equivalent to SciPy's DCT-II algorithm with \code{norm='ortho'}.
+#' @param t The length of the transform.
+#' @param bpparam A \linkS4class{BiocParallelParam} object indicating the type
+#' of parallelisation.
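+#' @examples
+#' # Illustrative sketch: the m / minv pair returned here should round-trip an
+#' # arbitrary order-3 tensor.
+#' x <- array(stats::rnorm(3 * 4 * 5), dim = c(3, 4, 5))
+#' transforms <- dctii_m_transforms(5)
+#' xhat <- transforms$m(x)
+#' max(abs(transforms$minv(xhat) - x)) # effectively zero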
+#' @return
+#' \item{m}{A function which applies the DCT-II along the last dimension of a
+#' given numerical input array. For 3D tensors it performs the mode-3 product
+#' with the DCT matrix.}
+#' \item{minv}{The inverse of m.}
+#' @author Brendan Lu
+#' @export
+dctii_m_transforms <- function(t, bpparam = NULL) {
+  return(matrix_to_m_transforms(m_mat = gsignal::dctmtx(t), bpparam = bpparam))
+}
+
+#' @description Compute Kilmer's facewise product. The for-loop implementation
+#' is relatively fast and very readable. There is also a BiocParallel
+#' implementation here, but it lacks thorough benchmarking results.
+#' bltodo: the parallel algorithm may be redundant, remove sometime?
+#' @author Brendan Lu
+#' @keywords internal
+.binary_facewise <- function(a, b, bpparam) {
+  na <- dim(a)[1]
+  pa <- dim(a)[2]
+  ta <- dim(a)[3]
+
+  nb <- dim(b)[1]
+  pb <- dim(b)[2]
+  tb <- dim(b)[3]
+
+  # error: different t for each input
+  stopifnot(ta == tb)
+  t <- ta
+  # error: non-conforming facewise dimensions
+  stopifnot(pa == nb)
+
+  if (is.null(bpparam)) {
+    # for-loop algorithm -------------------------------------------------------
+    fp_ab <- array(0, dim = c(na, pb, t))
+    for (i in seq_len(t)) {
+      fp_ab[, , i] <- a[, , i] %*% b[, , i]
+    }
+    return(fp_ab)
+    # --------------------------------------------------------------------------
+  } else {
+    # BiocParallel algorithm ---------------------------------------------------
+    # bltodo: benchmark / investigate preallocation here?
+    return(
+      simplify2array(
+        BiocParallel::bplapply(
+          array(seq_len(t)),
+          FUN = function(i) a[, , i] %*% b[, , i],
+          BPPARAM = bpparam
+        )
+      )
+    )
+    # --------------------------------------------------------------------------
+  }
+}
+
+#' @description Compute Kilmer's facewise product cumulatively across an
+#' arbitrary number of tensor inputs.
+#' @param ... Arbitrary number of numerical tensor inputs.
+#' @param bpparam A \linkS4class{BiocParallelParam} object indicating the type
+#' of parallelisation.
+#' @return Cumulative facewise product.
+#' @author Brendan Lu
+#' @export
+facewise_product <- function(..., bpparam = NULL) {
+  return(
+    Reduce(
+      function(a, b) .binary_facewise(a, b, bpparam = bpparam),
+      list(...)
+    )
+  )
+}
+
+#' @describeIn facewise_product Custom facewise product operator
+#' @export
+`%fp%` <- function(a, b) .binary_facewise(a, b, bpparam = NULL)
+
+#' @description Perform a facewise transpose on an order-3 tensor.
+#' @param tensor Numerical 3D array input.
+#' @return Facewise transpose of \code{tensor}.
+#' @author Brendan Lu
+#' @aliases ft facewise_transpose
+#' @export
+facewise_transpose <- function(tensor) {
+  return(aperm(tensor, c(2, 1, 3)))
+}
+
+#' @describeIn facewise_transpose Alias for \code{\link{facewise_transpose}}
+#' @export
+ft <- function(tensor) {
+  return(facewise_transpose(tensor))
+}
+
+#' @description Compute Kilmer's tensor-tensor m-product cumulatively across an
+#' arbitrary number of tensor inputs.
+#' @param ... Arbitrary number of numerical tensor inputs.
+#' @param m A function which applies an orthogonal tensor tubal transform.
+#' @param minv The inverse of m.
+#' @param bpparam A \linkS4class{BiocParallelParam} object indicating the type
+#' of parallelisation. Has no effect if the transform functions are explicitly
+#' set using \code{m} and \code{minv}.
+#' @return Cumulative m-product.
+#' @author Brendan Lu
+#' @export
+m_product <- function(
+  ...,
+  m = NULL,
+  minv = NULL,
+  bpparam = NULL
+) {
+  tensors <- list(...)
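+  # The steps below validate the inputs, fall back to the default DCT-II
+  # transform when m / minv are not supplied, and then compute
+  # minv(m(A) fp m(B) fp ...), i.e. Kilmer's m-product of the inputs.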
+ if (length(tensors) == 0) { + stop("No input tensors provided.") + } else { + t <- dim(tensors[[1]])[3] + } + if ( + xor(is.function(m), is.function(minv)) || + xor(is.null(m), is.null(minv)) + ) { + stop( + "If explicitly defined, both m and its inverse must be defined as + functions." + ) + } + # use dctii as default transform if user does not specify an explicit one + if (is.null(m)) { + transforms <- dctii_m_transforms(t, bpparam = bpparam) + m <- transforms$m + minv <- transforms$minv + } + return(minv( + Reduce( + function(a, b) .binary_facewise(a, b, bpparam = NULL), + lapply(list(...), m) + ) + )) +} diff --git a/R/tens.tpca.R b/R/tens.tpca.R new file mode 100644 index 00000000..f4961329 --- /dev/null +++ b/R/tens.tpca.R @@ -0,0 +1,171 @@ +# ============================================================================== +# Tensor pca based on Mor's TCAM algorithm +# ============================================================================== + +#' @description R implementation of np.unravel_index. NOTE: currently only works +#' for 1D to 2D column-major conversion, and returns a list of 2D indices. +#' Returns a matrix output of length(indices) columns, with two rows. The first +#' row corresponds to the sorted k indices, and the second row contains the +#' sorted t indices. +#' @author Brendan Lu +#' @keywords internal +.unravel_index <- function(indices, dim) { + nrows <- dim[1] + return(sapply( + indices, + FUN = function(x) { + c( + (x - 1) %% nrows + 1, # transformed k tensor position + (x - 1) %/% nrows + 1 # transformed t tensor position + ) + } + )) +} + +#' @description Extract tensor columns specified by .unravel_index output +#' Effectively achieves: +#' +#' self.loadings_matrix_ = hatV[ +#' :, self._k_t_flatten_sort[0], self._k_t_flatten_sort[1] +#' ].T +#' +#' type of indexing in Numpy. +#' +#' Basically performs tensor compression based on the ordered indices specified +#' by each column of k_t_indices. +#' @author Brendan Lu +#' @keywords internal +.extract_tensor_columns <- function(tensor, k_t_indices) { + return(apply( + k_t_indices, 2, + FUN = function(index_column) tensor[, index_column[1], index_column[2]] + )) +} + +#' @description Helper function to convert compressed matrix form of the +#' singular values into a sparse tensor. +#' @param mat Matrix s.t. each column contains the f-diagonal singular values +#' @param dim Dimension of output tensor +#' @author Brendan Lu +#' @keywords internal +.singular_vals_mat_to_tens <- function(mat, dim) { + n <- dim[1] + p <- dim[2] + t <- dim[3] + k <- min(n, p) + + tens <- array(0, dim = dim) + for (i in seq_len(t)) { + tens[1:k, 1:k, i] <- diag(mat[, i]) + } + return(tens) +} + +#' @description Tensor analogue of PCA introduced by Mor et al. (2022) based on +#' Kilmer's m-product algebra and tsvdm. +#' @author Brendan Lu +#' @export +tpca <- function( + x, + ncomp = NULL, + m = NULL, + minv = NULL, + center = TRUE, + matrix_output = TRUE, + bpparam = NULL +) { + if (length(dim(x)) != 3) { + stop("Please ensure input tensor is an order-3 array.") + } else { + n <- dim(x)[1] + p <- dim(x)[2] + t <- dim(x)[3] + k <- min(n, p) + } + + .stop_invalid_transform_input(m, minv) + + # use dctii as default transform if user does not specify an explicit one + if (is.null(m)) { + transforms <- dctii_m_transforms(t, bpparam = bpparam) + m <- transforms$m + minv <- transforms$minv + } + + # bltodo: add scaling as well? 
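+  # The centering step below subtracts, for every (column, face) pair, the
+  # mean taken over the n samples, i.e. each frontal slice is column-centered
+  # just as in ordinary matrix PCA.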
+ if (center) { + mean_slice <- apply(x, c(2, 3), mean) + x <- sweep(x, c(2, 3), STATS = mean_slice, FUN = "-") + } + + # NOTE: passing in bpparam configuration is not needed, as any + # bpparam specification will already take effect above in the transforms + tsvdm_decomposition <- tsvdm( + x, m, minv, + keep_hats = TRUE, + svals_matrix_form = TRUE + ) + + # flatten out Fortran column major style, then get sort order + singular_values <- as.vector(tsvdm_decomposition$shat) + k_t_flatten_sort <- order(singular_values, decreasing = TRUE) + singular_values <- singular_values[k_t_flatten_sort] + + # get explained variance + # bltodo: check relevant tensor theory for this! + squared_singular_values <- singular_values ^ 2 + total_var <- sum(squared_singular_values) + explained_variance_ratio <- squared_singular_values / total_var + + # process ncomp input + if (is.null(ncomp)) { + ncomp <- k * t + } else if (ncomp > 0 && ncomp < 1) { + # pick ncomp to be minimum number of integer components to explain the + # inputted variance + ratio_cumsum <- cumsum(explained_variance_ratio) + ncomp <- findInterval(ncomp, ratio_cumsum) + 1 + } else if (ncomp %% 1 == 0) { + stopifnot(ncomp > 0 && ncomp <= (k * t)) + } else { + stop("Please input an integer, 0 < float < 1, or NULL for ncomp parameter") + } + + # convert the argsort indexes back into the two dimensional indexes + # corresponding to the singular values matrix + # NOTE: these are the collection if i_h's and j_h's in Mor et al. (2022) + k_t_flatten_sort <- .unravel_index( + k_t_flatten_sort[1:ncomp], + dim(tsvdm_decomposition$shat) + ) + + # compute the values of the projected data + # bltodo: benchmark this against \eqn{new_data *_M vhat} + # bltodo: zero out uhat first? svd truncates it to rank already, but can + # further truncate to ncomp columns? + x_projected <- tsvdm_decomposition$uhat %fp% + .singular_vals_mat_to_tens(tsvdm_decomposition$shat, dim = c(n, p, t)) + + loadings <- tsvdm_decomposition$vhat + + # extract the columns in compressed form + if (matrix_output) { + x_projected <- .extract_tensor_columns(x_projected, k_t_flatten_sort) + loadings <- .extract_tensor_columns( + tsvdm_decomposition$vhat, + k_t_flatten_sort + ) + } + + # BLTODO: (to add in) zero out and compute rho as well + # see _rank_q_truncation_zero_out() in `tred` + # need to use this for tensor output to be appropriate + + return(invisible(list( + ncomp = ncomp, + x = x, + loadings = loadings, + variates = x_projected, # bltodo: maybe just adopt a different name here? + explained_variance = explained_variance_ratio[1:ncomp] + ))) +} diff --git a/R/tens.tpls.R b/R/tens.tpls.R new file mode 100644 index 00000000..58dff84b --- /dev/null +++ b/R/tens.tpls.R @@ -0,0 +1,230 @@ +# ============================================================================== +# Tensor pls generalization; developed @ Melbourne Integrative Genomics based +# on Kilmer's tensor m-product algebra +# ============================================================================== + +#' @description Convert a singular values tensor (in compressed matrix form) to +#' a set of indices corresponding to the (column,face) pairs of the top `ncomp` +#' singular values. NEEDS singular values to be in matrix form. 
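+#' @return A matrix with two rows and \code{ncomp} columns, in the same layout
+#' as the \code{.unravel_index} output: row 1 holds the column (k) indices and
+#' row 2 the face (t) indices of the selected singular values.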
+#' @author Brendan Lu +#' @keywords internal +.obtain_k_t_flatten_sort <- function(s_mat, ncomp) { + # bltodo: if ultimately only use once just place it in function body directly + return( + .unravel_index( + order(as.vector(s_mat))[1:ncomp], + dim(s_mat) + ) + ) +} + +#' @description Get the k, t index corresponding to the largest singular value. +#' Mirrors .unravel_index() but more efficient as we only care about the largest +#' singular value in tpls. +#' @author Brendan Lu +#' @keywords internal +.obtain_k_t_top <- function(s_mat) { + flat_index <- which.max(as.vector(s_mat)) + nrows <- dim(s_mat)[1] + return(c( + (flat_index - 1) %% nrows + 1, # transformed k tensor position + (flat_index - 1) %/% nrows + 1 # transformed t tensor position + )) +} + +#' @author Brendan Lu +#' @export +tpls <- function( + x, + y, + ncomp = NULL, + m = NULL, + minv = NULL, + mode = "regression", + center = TRUE, + matrix_output = TRUE, # FALSE only takes effect for tsvdm method? + bpparam = NULL +) { + # allowed modes mirror sklearn's 2D PLS models + # scikit-learn.org/stable/modules/cross_decomposition.html#cross-decomposition + allowed_modes <- c("canonical", "regression", "tsvdm") + if (!(mode %in% allowed_modes)) { + stop(paste( + "Please ensure mode is one of: ", + paste(allowed_modes, collapse = ", ") + )) + } + + if (length(dim(x)) != 3) { + stop("Please ensure x input tensor is an order-3 array") + } else { + n <- dim(x)[1] + p <- dim(x)[2] + t <- dim(x)[3] + } + + if (length(dim(y)) != 3) { + stop("Please ensure y input tensor is an order-3 array") + } else { + n2 <- dim(y)[1] + q <- dim(y)[2] + t2 <- dim(y)[3] + } + + if (n != n2) { + stop("Please ensure x and y tensor inputs have matching number of samples") + } + + if (t != t2) { + stop("Please ensure x and y tensor inputs have matching number of time + points") + } + + k <- min(n, p, q) + # maximum number of non-zero entries in the f-diagonal singular values tensor + # of tsvdm(XtY) + max_rank <- k * t + + .stop_invalid_transform_input(m, minv) + + # use dctii as default transform if user does not specify an explicit one + if (is.null(m)) { + transforms <- dctii_m_transforms(t, bpparam = bpparam) + m <- transforms$m + minv <- transforms$minv + } + + if (center) { + mean_slice_x <- apply(x, c(2, 3), mean) + mean_slice_y <- apply(y, c(2, 3), mean) + x <- sweep(x, c(2, 3), STATS = mean_slice_x, FUN = "-") + y <- sweep(y, c(2, 3), STATS = mean_slice_y, FUN = "-") + } + + # project x and y into 'hat-space', expensive computation so do it once and + # save into variable + xhat <- m(x) + yhat <- m(y) + + # process ncomp input, much simpler than tpca as we only accept integer input + # bltodo: investigate non integer input options? explained variance semantics? + if (is.null(ncomp)) { + ncomp <- max_rank + } else if (ncomp %% 1 == 0) { + stopifnot(ncomp > 0 && ncomp <= max_rank) + } else { + stop("Please input an integer or NULL for ncomp parameter") + } + + if (mode == "tsvdm") { + # simplest algorithm - just uses everything from the tsvdm call of XtY based + # without any deflation steps + tsvdm_decomposition_xty <- tsvdm( + ft(xhat) %fp% yhat, + transform = FALSE, + svals_matrix_form = TRUE + ) + + # loadings are just the u,v tensors from the tsvdm call + x_loadings <- tsvdm_decomposition_xty$u + y_loadings <- tsvdm_decomposition_xty$v + + # project the scaled / transformed x, y data onto the loadings + x_projected <- xhat %fp% x_loadings + y_projected <- yhat %fp% y_loadings + + if (matrix_output) { + # bltodo: tpls explained variance? 
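+      # Compress the order-3 loadings and scores into matrices by keeping only
+      # the (column, face) pairs chosen via .obtain_k_t_flatten_sort(), using
+      # .extract_tensor_columns() on each output tensor.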
+ k_t_flatten_sort <- .obtain_k_t_flatten_sort( + tsvdm_decomposition_xty$s, + ncomp + ) + x_loadings <- .extract_tensor_columns(x_loadings, k_t_flatten_sort) + y_loadings <- .extract_tensor_columns(y_loadings, k_t_flatten_sort) + x_projected <- .extract_tensor_columns(x_projected, k_t_flatten_sort) + y_projected <- .extract_tensor_columns(y_projected, k_t_flatten_sort) + } + + return(invisible(list( + ncomp = ncomp, + x = x, + y = y, + x_loadings = x_loadings, + y_loadings = y_loadings, + x_projected = x_projected, + y_projected = y_projected + ))) + + } else if (mode == "canonical" || mode == "regression") { + # preallocate output arrays which we will fill during the iterative process + x_loadings <- array(0, dim = c(p, ncomp)) + y_loadings <- array(0, dim = c(q, ncomp)) + x_projected <- array(0, dim = c(n, ncomp)) + y_projected <- array(0, dim = c(n, ncomp)) + + for (i in seq_len(ncomp)) { + # compute tsvdm + # BLTODO: tensor crossprod to speed up? + tsvdm_decomposition_xty <- tsvdm( + ft(xhat) %fp% yhat, + transform = FALSE, + svals_matrix_form = TRUE + ) + + # get indices corresponding to largest singular value + k_t_top <- .obtain_k_t_top(tsvdm_decomposition_xty$s) + curr_x_loadings <- tsvdm_decomposition_xty$u[, k_t_top[1], k_t_top[2]] + curr_y_loadings <- tsvdm_decomposition_xty$v[, k_t_top[1], k_t_top[2]] + + # note that only one face of xhat and yhat is relevant per iteration + # using k_t_top[2] we can basically just work in matrix world for a bit + curr_x_projected <- xhat[, , k_t_top[2]] %*% curr_x_loadings + curr_y_projected <- yhat[, , k_t_top[2]] %*% curr_y_loadings + + # perform deflation + # the calculation of the x regression coefficient and deflation step of + # xhat is the same regardless of pls mode + curr_x_reg_coef <- crossprod(xhat[, , k_t_top[2]], curr_x_projected) / + as.numeric(crossprod(curr_x_projected, curr_x_projected)) + + xhat[, , k_t_top[2]] <- xhat[, , k_t_top[2]] - + tcrossprod(curr_x_projected, curr_x_reg_coef) + + # the calculation of the y regression coefficient and deflation step of + # yhat differs depending on the pls mode + if (mode == "canonical") { + curr_y_reg_coef <- crossprod(yhat[, , k_t_top[2]], curr_y_projected) / + as.numeric(crossprod(curr_y_projected, curr_y_projected)) + + yhat[, , k_t_top[2]] <- yhat[, , k_t_top[2]] - + tcrossprod(curr_y_projected, curr_y_reg_coef) + } + + if (mode == "regression") { + curr_y_reg_coef <- crossprod(yhat[, , k_t_top[2]], curr_x_projected) / + as.numeric(crossprod(curr_x_projected, curr_x_projected)) + + yhat[, , k_t_top[2]] <- yhat[, , k_t_top[2]] - + tcrossprod(curr_x_projected, curr_y_reg_coef) + } + + x_loadings[, i] <- curr_x_loadings + y_loadings[, i] <- curr_y_loadings + x_projected[, i] <- curr_x_projected + y_projected[, i] <- curr_y_projected + } + + return(invisible(list( + ncomp = ncomp, + x = x, + y = y, + x_loadings = x_loadings, + y_loadings = y_loadings, + x_projected = x_projected, + y_projected = y_projected + ))) + + } else { + stop("Unexpected error in tpls, check 'mode' parameter input") + } +} diff --git a/R/tens.tsvdm.R b/R/tens.tsvdm.R new file mode 100644 index 00000000..b8c89f8e --- /dev/null +++ b/R/tens.tsvdm.R @@ -0,0 +1,75 @@ +# ============================================================================== +# Kilmer's t-SVD decomposition for order-3 tensors +# ============================================================================== + +#' @description Return the t-SVDM decomposition from Kilmer et al. (2021). 
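+#' @examples
+#' # Illustrative sketch: with the default DCT-II transform the returned
+#' # factors reconstruct the input via the m-product, x = u *_M s *_M ft(v).
+#' x <- array(stats::rnorm(4 * 3 * 2), dim = c(4, 3, 2))
+#' out <- tsvdm(x)
+#' max(abs(m_product(out$u, out$s, ft(out$v)) - x)) # effectively zero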
+#' @author Brendan Lu +#' @export +tsvdm <- function( + x, + m = NULL, + minv = NULL, + transform = TRUE, # differs to tred, control initial transform to m-space + keep_hats = FALSE, + svals_matrix_form = FALSE, + bpparam = NULL +) { + if (length(dim(x)) != 3) { + stop("Please ensure input tensor is an order-3 array.") + } else { + n <- dim(x)[1] + p <- dim(x)[2] + t <- dim(x)[3] + k <- min(n, p) + } + + if (transform) { + .stop_invalid_transform_input(m, minv) + + # use dctii as default transform if user does not specify an explicit one + if (is.null(m)) { + transforms <- dctii_m_transforms(t, bpparam = bpparam) + m <- transforms$m + minv <- transforms$minv + } + + x <- m(x) + } + + u <- array(0, dim = c(n, k, t)) + v <- array(0, dim = c(p, k, t)) + + # bltodo: less ugly way to write this? + if (svals_matrix_form) { + s <- array(0, dim = c(k, t)) + for (i in seq_len(t)) { + facewise_svd <- svd(x[, , i]) + u[, , i] <- facewise_svd$u + s[, i] <- facewise_svd$d + v[, , i] <- facewise_svd$v + } + } else { + s <- array(0, dim = c(k, k, t)) + for (i in seq_len(t)) { + facewise_svd <- svd(x[, , i]) + u[, , i] <- facewise_svd$u + s[, , i] <- diag(facewise_svd$d) + v[, , i] <- facewise_svd$v + } + } + + if (transform && keep_hats) { + # make clear returning in hat space + return(list(uhat = u, shat = s, vhat = v)) + } else { + if (transform) { + # minv will work on s regardless of what form it is in + output <- lapply(list(u, s, v), minv) + names(output) <- c("u", "s", "v") + return(output) + } else { + output <- list(u = u, s = s, v = v) + return(output) + } + } +} diff --git a/R/utils-argument_checkers.R b/R/utils-argument_checkers.R index 26efbf37..1002a017 100644 --- a/R/utils-argument_checkers.R +++ b/R/utils-argument_checkers.R @@ -76,7 +76,6 @@ #' @return test.keepX, possibly re-ordered by names for list X #' @noRd #' @keywords Internal -#' @examples #' .check_test.keepX <- function(test.keepX, X, @@ -172,7 +171,7 @@ #' \dontrun{ #' foo <- function(a=TRUE) .check_logical(a) #' foo(a = 1) -#' #> Error: ‘a’ must be a class logical (TRUE or FALSE). +#' #> Error: 'a' must be a class logical (TRUE or FALSE). 
#' } .check_logical <- function(arg) { diff --git a/man/mixOmics-package.Rd b/man/mixOmics-package.Rd index e9927dfc..3d720906 100644 --- a/man/mixOmics-package.Rd +++ b/man/mixOmics-package.Rd @@ -1,6 +1,6 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixOmics-package.R -\docType{package} +\docType{_PACKAGE} \name{mixOmics-package} \alias{mixOmics-package} \title{'Omics Data Integration Project} diff --git a/renv.lock b/renv.lock index 32fb817f..6be25c26 100644 --- a/renv.lock +++ b/renv.lock @@ -1,7 +1,27 @@ { "R": { - "Version": "4.2.1", + "Version": "4.4.1", "Repositories": [ + { + "Name": "BioCsoft", + "URL": "https://bioconductor.org/packages/3.15/bioc" + }, + { + "Name": "BioCann", + "URL": "https://bioconductor.org/packages/3.15/data/annotation" + }, + { + "Name": "BioCexp", + "URL": "https://bioconductor.org/packages/3.15/data/experiment" + }, + { + "Name": "BioCworkflows", + "URL": "https://bioconductor.org/packages/3.15/workflows" + }, + { + "Name": "BioCbooks", + "URL": "https://bioconductor.org/packages/3.15/books" + }, { "Name": "CRAN", "URL": "https://cran.r-project.org" @@ -14,156 +34,192 @@ "Packages": { "BH": { "Package": "BH", - "Version": "1.78.0-0", + "Version": "1.84.0-0", "Source": "Repository", "Repository": "CRAN", - "Hash": "4e348572ffcaa2fb1e610e7a941f6f3a", - "Requirements": [] + "Hash": "a8235afbcd6316e6e91433ea47661013" }, "BiocManager": { "Package": "BiocManager", - "Version": "1.30.18", + "Version": "1.30.23", "Source": "Repository", "Repository": "CRAN", - "Hash": "b1a93bed5debda5775636086fdca017b", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "47e968dfe563c1b22c2e20a067ec21d5" }, "BiocParallel": { "Package": "BiocParallel", - "Version": "1.30.3", + "Version": "1.30.4", "Source": "Bioconductor", - "git_url": "https://git.bioconductor.org/packages/BiocParallel", - "git_branch": "RELEASE_3_15", - "git_last_commit": "f753ae8", - "git_last_commit_date": "2022-06-04", - "Hash": "6679083c9b26ccd9f474ac5ad0bf5ea6", "Requirements": [ "BH", + "R", "codetools", "futile.logger", - "snow" - ] + "methods", + "parallel", + "snow", + "stats", + "utils" + ], + "Hash": "979598ec1228a442839526dc8578be36" }, "BiocStyle": { "Package": "BiocStyle", "Version": "2.24.0", "Source": "Bioconductor", - "git_url": "https://git.bioconductor.org/packages/BiocStyle", - "git_branch": "RELEASE_3_15", - "git_last_commit": "53095b5", - "git_last_commit_date": "2022-04-26", - "Hash": "7ba75b2a79d4efbf69dbcfc9334b5fa3", "Requirements": [ "BiocManager", "bookdown", "knitr", "rmarkdown", + "stats", + "utils", "yaml" - ] + ], + "Hash": "d3911eced000c529abc6f8fa711f3893" + }, + "BiocVersion": { + "Package": "BiocVersion", + "Version": "3.15.2", + "Source": "Bioconductor", + "Requirements": [ + "R" + ], + "Hash": "48e545a2fa24a967de72b1114bfe96ac" }, "MASS": { "Package": "MASS", - "Version": "7.3-57", + "Version": "7.3-61", "Source": "Repository", "Repository": "CRAN", - "Hash": "71476c1d88d1ebdf31580e5a257d5d31", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "methods", + "stats", + "utils" + ], + "Hash": "0cafd6f0500e5deba33be22c46bf6055" }, "Matrix": { "Package": "Matrix", - "Version": "1.4-1", + "Version": "1.7-0", "Source": "Repository", "Repository": "CRAN", - "Hash": "699c47c606293bdfbc9fd78a93c9c8fe", "Requirements": [ - "lattice" - ] + "R", + "grDevices", + "graphics", + "grid", + "lattice", + "methods", + "stats", + "utils" + ], + "Hash": "1920b2f11133b12350024297d8a4ff4a" }, "R6": { "Package": "R6", 
"Version": "2.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "470851b6d5d0ac559e9d01bb352b4021", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "470851b6d5d0ac559e9d01bb352b4021" }, "RColorBrewer": { "Package": "RColorBrewer", "Version": "1.1-3", "Source": "Repository", "Repository": "CRAN", - "Hash": "45f0398006e83a5b10b72a90663d8d8c", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "45f0398006e83a5b10b72a90663d8d8c" }, "RSpectra": { "Package": "RSpectra", - "Version": "0.16-1", + "Version": "0.16-2", "Source": "Repository", "Repository": "CRAN", - "Hash": "6b5ab997fd5ff6d46a5f1d9f8b76961c", "Requirements": [ "Matrix", + "R", "Rcpp", "RcppEigen" - ] + ], + "Hash": "5ffd7a70479497271e57cd0cc2465b3b" }, "Rcpp": { "Package": "Rcpp", - "Version": "1.0.9", + "Version": "1.0.13", "Source": "Repository", "Repository": "CRAN", - "Hash": "e9c08b94391e9f3f97355841229124f2", - "Requirements": [] + "Requirements": [ + "methods", + "utils" + ], + "Hash": "f27411eb6d9c3dada5edd444b8416675" }, "RcppEigen": { "Package": "RcppEigen", - "Version": "0.3.3.9.2", + "Version": "0.3.4.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "4c86baed78388ceb06f88e3e9a1d87f5", "Requirements": [ - "Matrix", - "Rcpp" - ] + "R", + "Rcpp", + "stats", + "utils" + ], + "Hash": "4ac8e423216b8b70cb9653d1b3f71eb9" }, "askpass": { "Package": "askpass", - "Version": "1.1", + "Version": "1.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "e8a22846fff485f0be3770c2da758713", "Requirements": [ "sys" - ] + ], + "Hash": "cad6cf7f1d5f6e906700b9d3e718c796" }, "badger": { "Package": "badger", - "Version": "0.2.1", + "Version": "0.2.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "947d26ca2f684ec7906181770c86b74b", "Requirements": [ + "R", "desc", "dlstats", "rvcheck", "usethis" - ] + ], + "Hash": "1646e3fbd4138f3e3e2cf2f686c73501" }, "base64enc": { "Package": "base64enc", "Version": "0.1-3", "Source": "Repository", "Repository": "CRAN", - "Hash": "543776ae6848fde2f48ff3816d0628bc", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "543776ae6848fde2f48ff3816d0628bc" }, "bookdown": { "Package": "bookdown", - "Version": "0.28", + "Version": "0.40", "Source": "Repository", "Repository": "CRAN", - "Hash": "588be2b78d6887d724e82980c1135a25", "Requirements": [ + "R", "htmltools", "jquerylib", "knitr", @@ -171,296 +227,366 @@ "tinytex", "xfun", "yaml" - ] + ], + "Hash": "896a79478a50c78fb035a37148638f4e" }, "brio": { "Package": "brio", - "Version": "1.1.3", + "Version": "1.1.5", "Source": "Repository", "Repository": "CRAN", - "Hash": "976cf154dfb043c012d87cddd8bca363", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "c1ee497a6d999947c2c224ae46799b1a" }, "bslib": { "Package": "bslib", - "Version": "0.4.0", + "Version": "0.7.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "be5ee090716ce1671be6cd5d7c34d091", "Requirements": [ + "R", + "base64enc", "cachem", + "fastmap", + "grDevices", "htmltools", "jquerylib", "jsonlite", + "lifecycle", "memoise", + "mime", "rlang", "sass" - ] + ], + "Hash": "8644cc53f43828f19133548195d7e59e" }, "cachem": { "Package": "cachem", - "Version": "1.0.6", + "Version": "1.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "648c5b3d71e6a37e3043617489a0a0e9", "Requirements": [ "fastmap", "rlang" - ] + ], + "Hash": "cd9a672193789068eb5a2aad65a0dedf" }, "callr": { "Package": "callr", - "Version": "3.7.1", + "Version": "3.7.6", "Source": "Repository", "Repository": "CRAN", - "Hash": 
"2fda237f24bc56508f31394beaa56877", "Requirements": [ + "R", "R6", - "processx" - ] + "processx", + "utils" + ], + "Hash": "d7e13f49c19103ece9e58ad2d83a7354" }, "cli": { "Package": "cli", - "Version": "3.3.0", + "Version": "3.6.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "23abf173c2b783dcc43379ab9bba00ee", "Requirements": [ - "glue" - ] + "R", + "utils" + ], + "Hash": "b21916dd77a27642b447374a5d30ecf3" }, "clipr": { "Package": "clipr", "Version": "0.8.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "3f038e5ac7f41d4ac41ce658c85e3042", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "3f038e5ac7f41d4ac41ce658c85e3042" }, "codetools": { "Package": "codetools", - "Version": "0.2-18", + "Version": "0.2-20", "Source": "Repository", "Repository": "CRAN", - "Hash": "019388fc48e48b3da0d3a76ff94608a8", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "61e097f35917d342622f21cdc79c256e" }, "colorspace": { "Package": "colorspace", - "Version": "2.0-3", + "Version": "2.1-1", "Source": "Repository", "Repository": "CRAN", - "Hash": "bb4341986bc8b914f0f0acf2e4a3f2f7", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "methods", + "stats" + ], + "Hash": "d954cb1c57e8d8b756165d7ba18aa55a" }, "corpcor": { "Package": "corpcor", "Version": "1.6.10", "Source": "Repository", "Repository": "CRAN", - "Hash": "17ebe3b6d75d09c5bab3891880b34237", - "Requirements": [] + "Requirements": [ + "R", + "stats" + ], + "Hash": "17ebe3b6d75d09c5bab3891880b34237" }, "cpp11": { "Package": "cpp11", - "Version": "0.4.2", + "Version": "0.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "fa53ce256cd280f468c080a58ea5ba8c", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "91570bba75d0c9d3f1040c835cee8fba" }, "crayon": { "Package": "crayon", - "Version": "1.5.1", + "Version": "1.5.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "8dc45fd8a1ee067a92b85ef274e66d6a", - "Requirements": [] + "Requirements": [ + "grDevices", + "methods", + "utils" + ], + "Hash": "859d96e65ef198fd43e82b9628d593ef" }, "credentials": { "Package": "credentials", - "Version": "1.3.2", + "Version": "2.0.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "93762d0a34d78e6a025efdbfb5c6bb41", "Requirements": [ "askpass", "curl", "jsonlite", "openssl", "sys" - ] + ], + "Hash": "c7844b32098dcbd1c59cbd8dddb4ecc6" }, "curl": { "Package": "curl", - "Version": "4.3.2", + "Version": "5.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "022c42d49c28e95d69ca60446dbabf88", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "411ca2c03b1ce5f548345d2fc2685f7a" }, "desc": { "Package": "desc", - "Version": "1.4.1", + "Version": "1.4.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "eebd27ee58fcc58714eedb7aa07d8ad1", "Requirements": [ + "R", "R6", "cli", - "rprojroot" - ] + "utils" + ], + "Hash": "99b79fcbd6c4d1ce087f5c5c758b384f" }, "diffobj": { "Package": "diffobj", "Version": "0.3.5", "Source": "Repository", "Repository": "CRAN", - "Hash": "bcaa8b95f8d7d01a5dedfd959ce88ab8", "Requirements": [ - "crayon" - ] + "R", + "crayon", + "methods", + "stats", + "tools", + "utils" + ], + "Hash": "bcaa8b95f8d7d01a5dedfd959ce88ab8" }, "digest": { "Package": "digest", - "Version": "0.6.29", + "Version": "0.6.36", "Source": "Repository", "Repository": "CRAN", - "Hash": "cf6b206a045a684728c3267ef7596190", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "fd6824ad91ede64151e93af67df6376b" }, "dlstats": { 
"Package": "dlstats", - "Version": "0.1.5", + "Version": "0.1.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "73932ec955228bb5d6b1c06dff3b2481", "Requirements": [ + "R", "RColorBrewer", "ggplot2", "jsonlite", "magrittr", - "scales" - ] + "scales", + "utils" + ], + "Hash": "e9ba1c73457c25b19b94bd71eceb38ab" }, "dplyr": { "Package": "dplyr", - "Version": "1.0.9", + "Version": "1.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "f0bda1627a7f5d3f9a0b5add931596ac", "Requirements": [ + "R", "R6", + "cli", "generics", "glue", "lifecycle", "magrittr", + "methods", "pillar", "rlang", "tibble", "tidyselect", + "utils", "vctrs" - ] + ], + "Hash": "fedd9d00c2944ff00a0e2696ccf048ec" }, "ellipse": { "Package": "ellipse", - "Version": "0.4.3", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "feedb4386d9c3c157989ca7af419c892", - "Requirements": [] - }, - "ellipsis": { - "Package": "ellipsis", - "Version": "0.3.2", + "Version": "0.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077", "Requirements": [ - "rlang" - ] + "R", + "graphics", + "stats" + ], + "Hash": "4aa52573ccedf7dc635a0eb471944a36" }, "evaluate": { "Package": "evaluate", - "Version": "0.16", + "Version": "0.24.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "9a3d3c345f8a5648abe61608aaa29518", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "8e982ed8557632cb53d7cb2fc94613af" }, "fansi": { "Package": "fansi", - "Version": "1.0.3", + "Version": "1.0.6", "Source": "Repository", "Repository": "CRAN", - "Hash": "83a8afdbe71839506baa9f90eebad7ec", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "utils" + ], + "Hash": "962174cf2aeb5b9eea581522286a911f" }, "farver": { "Package": "farver", - "Version": "2.1.1", + "Version": "2.1.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "8106d78941f34855c440ddb946b8f7a5", - "Requirements": [] + "Hash": "680887028577f3fa2a81e410ed0d6e42" }, "fastmap": { "Package": "fastmap", - "Version": "1.1.0", + "Version": "1.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "77bd60a6157420d4ffa93b27cf6a58b8", - "Requirements": [] + "Hash": "aa5e1cd11c2d15497494c5292d7ffcc8" + }, + "fontawesome": { + "Package": "fontawesome", + "Version": "0.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "htmltools", + "rlang" + ], + "Hash": "c2efdd5f0bcd1ea861c2d4e2a883a67d" }, "formatR": { "Package": "formatR", - "Version": "1.12", + "Version": "1.14", "Source": "Repository", "Repository": "CRAN", - "Hash": "e45696cc90f4d5b993fa1a289e01c5df", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "63cb26d12517c7863f5abb006c5e0f25" }, "fs": { "Package": "fs", - "Version": "1.5.2", + "Version": "1.6.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "7c89603d81793f0d5486d91ab1fc6f1d", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "15aeb8c27f5ea5161f9f6a641fafd93a" }, "futile.logger": { "Package": "futile.logger", "Version": "1.4.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "99f0ace8c05ec7d3683d27083c4f1e7e", "Requirements": [ + "R", "futile.options", - "lambda.r" - ] + "lambda.r", + "utils" + ], + "Hash": "99f0ace8c05ec7d3683d27083c4f1e7e" }, "futile.options": { "Package": "futile.options", "Version": "1.0.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "0d9bf02413ddc2bbe8da9ce369dcdd2b", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": 
"0d9bf02413ddc2bbe8da9ce369dcdd2b" }, "generics": { "Package": "generics", "Version": "0.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "15e9634c0fcd294799e9b2e929ed1b86", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "15e9634c0fcd294799e9b2e929ed1b86" }, "gert": { "Package": "gert", - "Version": "1.7.0", + "Version": "2.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "e8432885a6c629a693c6e0716d1b68ec", "Requirements": [ "askpass", "credentials", @@ -468,317 +594,419 @@ "rstudioapi", "sys", "zip" - ] + ], + "Hash": "bdc909d9f16e2478d615b0e6a7330435" }, "ggplot2": { "Package": "ggplot2", - "Version": "3.3.6", + "Version": "3.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "0fb26d0674c82705c6b701d1a61e02ea", "Requirements": [ "MASS", - "digest", + "R", + "cli", "glue", + "grDevices", + "grid", "gtable", "isoband", + "lifecycle", "mgcv", "rlang", "scales", + "stats", "tibble", + "vctrs", "withr" - ] + ], + "Hash": "44c6a2f8202d5b7e878ea274b1092426" }, "ggrepel": { "Package": "ggrepel", - "Version": "0.9.1", + "Version": "0.9.6", "Source": "Repository", "Repository": "CRAN", - "Hash": "08ab869f37e6a7741a64ab9069bcb67d", "Requirements": [ + "R", "Rcpp", "ggplot2", + "grid", "rlang", - "scales" - ] + "scales", + "withr" + ], + "Hash": "3d4156850acc1161f2f24bc61c9217c1" }, "gh": { "Package": "gh", - "Version": "1.3.0", + "Version": "1.4.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "38c2580abbda249bd6afeec00d14f531", "Requirements": [ + "R", "cli", "gitcreds", - "httr", + "glue", + "httr2", "ini", - "jsonlite" - ] + "jsonlite", + "lifecycle", + "rlang" + ], + "Hash": "fbbbc48eba7a6626a08bb365e44b563b" }, "gitcreds": { "Package": "gitcreds", - "Version": "0.1.1", + "Version": "0.1.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "f3aefccc1cc50de6338146b62f115de8", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "ab08ac61f3e1be454ae21911eb8bc2fe" }, "glue": { "Package": "glue", - "Version": "1.6.2", + "Version": "1.7.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "e0b3a53876554bd45879e596cdb10a52" }, "gridExtra": { "Package": "gridExtra", "Version": "2.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "7d7f283939f563670a697165b2cf5560", "Requirements": [ - "gtable" - ] + "grDevices", + "graphics", + "grid", + "gtable", + "utils" + ], + "Hash": "7d7f283939f563670a697165b2cf5560" + }, + "gsignal": { + "Package": "gsignal", + "Version": "0.3-6", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "Rcpp", + "grDevices", + "pracma" + ], + "Hash": "89717f7be39bd2d35a7a78dcfae516bd" }, "gtable": { "Package": "gtable", - "Version": "0.3.0", + "Version": "0.3.5", "Source": "Repository", "Repository": "CRAN", - "Hash": "ac5c6baf7822ce8732b343f14c072c4d", - "Requirements": [] + "Requirements": [ + "R", + "cli", + "glue", + "grid", + "lifecycle", + "rlang" + ], + "Hash": "e18861963cbc65a27736e02b3cd3c4a0" }, "highr": { "Package": "highr", - "Version": "0.9", + "Version": "0.11", "Source": "Repository", "Repository": "CRAN", - "Hash": "8eb36c8125038e648e5d111c0d7b2ed4", "Requirements": [ + "R", "xfun" - ] + ], + "Hash": "d65ba49117ca223614f71b60d85b8ab7" }, "htmltools": { "Package": "htmltools", - "Version": "0.5.3", + "Version": "0.5.8.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "6496090a9e00f8354b811d1a2d47b566", 
"Requirements": [ + "R", "base64enc", "digest", "fastmap", - "rlang" - ] + "grDevices", + "rlang", + "utils" + ], + "Hash": "81d371a9cc60640e74e4ab6ac46dcedc" }, "htmlwidgets": { "Package": "htmlwidgets", - "Version": "1.5.4", + "Version": "1.6.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "76147821cd3fcd8c4b04e1ef0498e7fb", "Requirements": [ + "grDevices", "htmltools", "jsonlite", + "knitr", + "rmarkdown", "yaml" - ] + ], + "Hash": "04291cc45198225444a397606810ac37" }, - "httr": { - "Package": "httr", - "Version": "1.4.3", + "httr2": { + "Package": "httr2", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "88d1b310583777edf01ccd1216fb0b2b", "Requirements": [ + "R", "R6", + "cli", "curl", - "jsonlite", - "mime", - "openssl" - ] + "glue", + "lifecycle", + "magrittr", + "openssl", + "rappdirs", + "rlang", + "vctrs", + "withr" + ], + "Hash": "320c8fe23fcb25a6690ef7bdb6a3a705" }, "igraph": { "Package": "igraph", - "Version": "1.3.4", + "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "40e38ff98f90967805831b6234afd05f", "Requirements": [ "Matrix", + "R", + "cli", + "cpp11", + "grDevices", + "graphics", + "lifecycle", "magrittr", + "methods", "pkgconfig", - "rlang" - ] + "rlang", + "stats", + "utils", + "vctrs" + ], + "Hash": "c3b7d801d722e26e4cd888e042bf9af5" }, "ini": { "Package": "ini", "Version": "0.3.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "6154ec2223172bce8162d4153cda21f7", - "Requirements": [] + "Hash": "6154ec2223172bce8162d4153cda21f7" }, "isoband": { "Package": "isoband", - "Version": "0.2.5", + "Version": "0.2.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "7ab57a6de7f48a8dc84910d1eca42883", - "Requirements": [] + "Requirements": [ + "grid", + "utils" + ], + "Hash": "0080607b4a1a7b28979aecef976d8bc2" }, "jquerylib": { "Package": "jquerylib", "Version": "0.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "5aab57a3bd297eee1c1d862735972182", "Requirements": [ "htmltools" - ] + ], + "Hash": "5aab57a3bd297eee1c1d862735972182" }, "jsonlite": { "Package": "jsonlite", - "Version": "1.8.0", + "Version": "1.8.8", "Source": "Repository", "Repository": "CRAN", - "Hash": "d07e729b27b372429d42d24d503613a0", - "Requirements": [] + "Requirements": [ + "methods" + ], + "Hash": "e1b9c55281c5adc4dd113652d9e26768" }, "knitr": { "Package": "knitr", - "Version": "1.39", + "Version": "1.48", "Source": "Repository", "Repository": "CRAN", - "Hash": "029ab7c4badd3cf8af69016b2ba27493", "Requirements": [ + "R", "evaluate", "highr", - "stringr", + "methods", + "tools", "xfun", "yaml" - ] + ], + "Hash": "acf380f300c721da9fde7df115a5f86f" }, "labeling": { "Package": "labeling", - "Version": "0.4.2", + "Version": "0.4.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "3d5108641f47470611a32d0bdf357a72", - "Requirements": [] + "Requirements": [ + "graphics", + "stats" + ], + "Hash": "b64ec208ac5bc1852b285f665d6368b3" }, "lambda.r": { "Package": "lambda.r", "Version": "1.2.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "b1e925c4b9ffeb901bacf812cbe9a6ad", "Requirements": [ + "R", "formatR" - ] + ], + "Hash": "b1e925c4b9ffeb901bacf812cbe9a6ad" }, "lattice": { "Package": "lattice", - "Version": "0.20-45", + "Version": "0.22-6", "Source": "Repository", "Repository": "CRAN", - "Hash": "b64cdbb2b340437c4ee047a1f4c4377b", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "grid", + "stats", + "utils" + ], + "Hash": "cc5ac1ba4c238c7ca9fa6a87ca11a7e2" }, 
"lifecycle": { "Package": "lifecycle", - "Version": "1.0.1", + "Version": "1.0.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "a6b6d352e3ed897373ab19d8395c98d0", "Requirements": [ + "R", + "cli", "glue", "rlang" - ] + ], + "Hash": "b8552d117e1b808b09a832f589b79035" }, "magrittr": { "Package": "magrittr", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "7ce2733a9826b3aeb1775d56fd305472", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "7ce2733a9826b3aeb1775d56fd305472" }, "matrixStats": { "Package": "matrixStats", - "Version": "0.62.0", + "Version": "1.4.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "36ad89a805c436c5316c22490079da67", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "8885ffb1f46e820dede6b2ca9442abca" }, "memoise": { "Package": "memoise", "Version": "2.0.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c", "Requirements": [ "cachem", "rlang" - ] + ], + "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c" }, "mgcv": { "Package": "mgcv", - "Version": "1.8-40", + "Version": "1.9-1", "Source": "Repository", "Repository": "CRAN", - "Hash": "c6b2fdb18cf68ab613bd564363e1ba0d", "Requirements": [ "Matrix", - "nlme" - ] + "R", + "graphics", + "methods", + "nlme", + "splines", + "stats", + "utils" + ], + "Hash": "110ee9d83b496279960e162ac97764ce" }, "mime": { "Package": "mime", "Version": "0.12", "Source": "Repository", "Repository": "CRAN", - "Hash": "18e9c28c1d3ca1560ce30658b22ce104", - "Requirements": [] + "Requirements": [ + "tools" + ], + "Hash": "18e9c28c1d3ca1560ce30658b22ce104" }, "munsell": { "Package": "munsell", - "Version": "0.5.0", + "Version": "0.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "6dfe8bf774944bd5595785e3229d8771", "Requirements": [ - "colorspace" - ] + "colorspace", + "methods" + ], + "Hash": "4fd8900853b746af55b81fda99da7695" }, "nlme": { "Package": "nlme", - "Version": "3.1-157", + "Version": "3.1-165", "Source": "Repository", "Repository": "CRAN", - "Hash": "dbca60742be0c9eddc5205e5c7ca1f44", "Requirements": [ - "lattice" - ] + "R", + "graphics", + "lattice", + "stats", + "utils" + ], + "Hash": "2769a88be217841b1f33ed469675c3cc" }, "openssl": { "Package": "openssl", - "Version": "2.0.2", + "Version": "2.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "6d3bef2e305f55c705c674653c7d7d3d", "Requirements": [ "askpass" - ] + ], + "Hash": "2bcca3848e4734eb3b16103bc9aa4b8e" }, "pillar": { "Package": "pillar", - "Version": "1.8.0", + "Version": "1.9.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "f95cf85794546c4ac2b9a6ca42e671ff", "Requirements": [ "cli", "fansi", @@ -786,363 +1014,454 @@ "lifecycle", "rlang", "utf8", + "utils", "vctrs" - ] + ], + "Hash": "15da5a8412f317beeee6175fbc76f4bb" + }, + "pkgbuild": { + "Package": "pkgbuild", + "Version": "1.4.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "callr", + "cli", + "desc", + "processx" + ], + "Hash": "a29e8e134a460a01e0ca67a4763c595b" }, "pkgconfig": { "Package": "pkgconfig", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "01f28d4278f15c76cddbea05899c5d6f", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "01f28d4278f15c76cddbea05899c5d6f" }, "pkgload": { "Package": "pkgload", - "Version": "1.3.0", + "Version": "1.4.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "4b20f937a363c78a5730265c1925f54a", "Requirements": [ + "R", "cli", - "crayon", "desc", "fs", 
"glue", + "lifecycle", + "methods", + "pkgbuild", + "processx", "rlang", "rprojroot", + "utils", "withr" - ] + ], + "Hash": "2ec30ffbeec83da57655b850cf2d3e0e" }, "plyr": { "Package": "plyr", - "Version": "1.8.7", + "Version": "1.8.9", "Source": "Repository", "Repository": "CRAN", - "Hash": "9c17c6ee41639ebdc1d7266546d3b627", "Requirements": [ + "R", "Rcpp" - ] + ], + "Hash": "6b8177fd19982f0020743fadbfdbd933" + }, + "pracma": { + "Package": "pracma", + "Version": "2.4.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "grDevices", + "graphics", + "stats", + "utils" + ], + "Hash": "44bc172d47d1ea0a638d9f299e321203" }, "praise": { "Package": "praise", "Version": "1.0.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "a555924add98c99d2f411e37e7d25e9f", - "Requirements": [] + "Hash": "a555924add98c99d2f411e37e7d25e9f" }, "processx": { "Package": "processx", - "Version": "3.7.0", + "Version": "3.8.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "f91df0f5f31ffdf88bc0b624f5ebab0f", "Requirements": [ + "R", "R6", - "ps" - ] + "ps", + "utils" + ], + "Hash": "0c90a7d71988856bad2a2a45dd871bb9" }, "ps": { "Package": "ps", - "Version": "1.7.1", + "Version": "1.7.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "8b93531308c01ad0e56d9eadcc0c4fcd", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "878b467580097e9c383acbb16adab57a" }, "purrr": { "Package": "purrr", - "Version": "0.3.4", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "97def703420c8ab10d8f0e6c72101e02", "Requirements": [ + "R", + "cli", + "lifecycle", "magrittr", - "rlang" - ] + "rlang", + "vctrs" + ], + "Hash": "1cba04a4e9414bdefc9dcaa99649a8dc" }, "rARPACK": { "Package": "rARPACK", "Version": "0.11-0", "Source": "Repository", "Repository": "CRAN", - "Hash": "fa4822bb52e2a6b5a7a6f9611e3a6739", "Requirements": [ "RSpectra" - ] + ], + "Hash": "fa4822bb52e2a6b5a7a6f9611e3a6739" }, "rappdirs": { "Package": "rappdirs", "Version": "0.3.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "5e3c5dc0b071b21fa128676560dbe94d", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "5e3c5dc0b071b21fa128676560dbe94d" }, "rematch2": { "Package": "rematch2", "Version": "2.1.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "76c9e04c712a05848ae7a23d2f170a40", "Requirements": [ "tibble" - ] + ], + "Hash": "76c9e04c712a05848ae7a23d2f170a40" }, "renv": { "Package": "renv", - "Version": "0.15.5", + "Version": "1.0.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "6a38294e7d12f5d8e656b08c5bd8ae34", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "397b7b2a265bc5a7a06852524dabae20" }, "reshape2": { "Package": "reshape2", "Version": "1.4.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "bb5996d0bd962d214a11140d77589917", "Requirements": [ + "R", "Rcpp", "plyr", "stringr" - ] + ], + "Hash": "bb5996d0bd962d214a11140d77589917" }, "rgl": { "Package": "rgl", - "Version": "0.109.6", + "Version": "1.3.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "0da70b9563a89391fd62468724cdbafc", "Requirements": [ + "R", "R6", "base64enc", + "grDevices", + "graphics", "htmltools", "htmlwidgets", "jsonlite", "knitr", "magrittr", - "mime" - ] + "mime", + "stats", + "utils" + ], + "Hash": "54f8dcdef54a2c7737c0eec27d394dfd" }, "rlang": { "Package": "rlang", - "Version": "1.0.4", + "Version": "1.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "6539dd8c651e67e3b55b5ffea106362b", - 
"Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "3eec01f8b1dee337674b2e34ab1f9bc1" }, "rmarkdown": { "Package": "rmarkdown", - "Version": "2.14", + "Version": "2.28", "Source": "Repository", "Repository": "CRAN", - "Hash": "31b60a882fabfabf6785b8599ffeb8ba", "Requirements": [ + "R", "bslib", "evaluate", + "fontawesome", "htmltools", "jquerylib", "jsonlite", "knitr", - "stringr", + "methods", "tinytex", + "tools", + "utils", "xfun", "yaml" - ] + ], + "Hash": "062470668513dcda416927085ee9bdc7" }, "rprojroot": { "Package": "rprojroot", - "Version": "2.0.3", + "Version": "2.0.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "1de7ab598047a87bba48434ba35d497d", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "4c8415e0ec1e29f3f4f6fc108bef0144" }, "rstudioapi": { "Package": "rstudioapi", - "Version": "0.13", + "Version": "0.16.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "06c85365a03fdaf699966cc1d3cf53ea", - "Requirements": [] + "Hash": "96710351d642b70e8f02ddeb237c46a7" }, "rvcheck": { "Package": "rvcheck", "Version": "0.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "fee46a3924e5522b01138accbe6ff804", "Requirements": [ "BiocManager", + "R", + "utils", "yulab.utils" - ] + ], + "Hash": "fee46a3924e5522b01138accbe6ff804" }, "sass": { "Package": "sass", - "Version": "0.4.2", + "Version": "0.4.9", "Source": "Repository", "Repository": "CRAN", - "Hash": "1b191143d7d3444d504277843f3a95fe", "Requirements": [ "R6", "fs", "htmltools", "rappdirs", "rlang" - ] + ], + "Hash": "d53dbfddf695303ea4ad66f86e99b95d" }, "scales": { "Package": "scales", - "Version": "1.2.0", + "Version": "1.3.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "6e8750cdd13477aa440d453da93d5cac", "Requirements": [ + "R", "R6", "RColorBrewer", + "cli", "farver", + "glue", "labeling", "lifecycle", "munsell", "rlang", "viridisLite" - ] + ], + "Hash": "c19df082ba346b0ffa6f833e92de34d1" }, "snow": { "Package": "snow", "Version": "0.4-4", "Source": "Repository", "Repository": "CRAN", - "Hash": "40b74690debd20c57d93d8c246b305d4", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "40b74690debd20c57d93d8c246b305d4" }, "stringi": { "Package": "stringi", - "Version": "1.7.8", + "Version": "1.8.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "a68b980681bcbc84c7a67003fa796bfb", - "Requirements": [] + "Requirements": [ + "R", + "stats", + "tools", + "utils" + ], + "Hash": "39e1144fd75428983dc3f63aa53dfa91" }, "stringr": { "Package": "stringr", - "Version": "1.4.0", + "Version": "1.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "0759e6b6c0957edb1311028a49a35e76", "Requirements": [ + "R", + "cli", "glue", + "lifecycle", "magrittr", - "stringi" - ] + "rlang", + "stringi", + "vctrs" + ], + "Hash": "960e2ae9e09656611e0b8214ad543207" }, "sys": { "Package": "sys", - "Version": "3.4", + "Version": "3.4.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "b227d13e29222b4574486cfcbde077fa", - "Requirements": [] + "Hash": "3a1be13d68d47a8cd0bfd74739ca1555" }, "testthat": { "Package": "testthat", - "Version": "3.1.4", + "Version": "3.2.1.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "f76c2a02d0fdc24aa7a47ea34261a6e3", "Requirements": [ + "R", "R6", "brio", "callr", "cli", - "crayon", "desc", "digest", - "ellipsis", "evaluate", "jsonlite", "lifecycle", "magrittr", + "methods", "pkgload", "praise", "processx", "ps", "rlang", + "utils", "waldo", "withr" - ] + ], + "Hash": "3f6e7e5e2220856ff865e4834766bf2b" 
}, "tibble": { "Package": "tibble", - "Version": "3.1.8", + "Version": "3.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "56b6934ef0f8c68225949a8672fe1a8f", "Requirements": [ + "R", "fansi", "lifecycle", "magrittr", + "methods", "pillar", "pkgconfig", "rlang", + "utils", "vctrs" - ] + ], + "Hash": "a84e2cc86d07289b3b6f5069df7a004c" }, "tidyr": { "Package": "tidyr", - "Version": "1.2.0", + "Version": "1.3.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "d8b95b7fee945d7da6888cf7eb71a49c", "Requirements": [ + "R", + "cli", "cpp11", "dplyr", - "ellipsis", "glue", "lifecycle", "magrittr", "purrr", "rlang", + "stringr", "tibble", "tidyselect", + "utils", "vctrs" - ] + ], + "Hash": "915fb7ce036c22a6a33b5a8adb712eb1" }, "tidyselect": { "Package": "tidyselect", - "Version": "1.1.2", + "Version": "1.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "17f6da8cfd7002760a859915ce7eef8f", "Requirements": [ - "ellipsis", + "R", + "cli", "glue", - "purrr", + "lifecycle", "rlang", - "vctrs" - ] + "vctrs", + "withr" + ], + "Hash": "829f27b9c4919c16b593794a6344d6c0" }, "tinytex": { "Package": "tinytex", - "Version": "0.40", + "Version": "0.52", "Source": "Repository", "Repository": "CRAN", - "Hash": "e7b654da5e77bc4e5435a966329cd25f", "Requirements": [ "xfun" - ] + ], + "Hash": "cfbad971a71f0e27cec22e544a08bc3b" }, "usethis": { "Package": "usethis", - "Version": "2.1.6", + "Version": "2.2.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "a67a22c201832b12c036cc059f1d137d", "Requirements": [ + "R", "cli", "clipr", "crayon", @@ -1159,102 +1478,127 @@ "rlang", "rprojroot", "rstudioapi", + "stats", + "utils", "whisker", "withr", "yaml" - ] + ], + "Hash": "d524fd42c517035027f866064417d7e6" }, "utf8": { "Package": "utf8", - "Version": "1.2.2", + "Version": "1.2.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "c9c462b759a5cc844ae25b5942654d13", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "62b65c52671e6665f803ff02954446e9" }, "vctrs": { "Package": "vctrs", - "Version": "0.4.1", + "Version": "0.6.5", "Source": "Repository", "Repository": "CRAN", - "Hash": "8b54f22e2a58c4f275479c92ce041a57", "Requirements": [ + "R", "cli", "glue", + "lifecycle", "rlang" - ] + ], + "Hash": "c03fa420630029418f7e6da3667aac4a" }, "viridisLite": { "Package": "viridisLite", - "Version": "0.4.0", + "Version": "0.4.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "55e157e2aa88161bdb0754218470d204", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "c826c7c4241b6fc89ff55aaea3fa7491" }, "waldo": { "Package": "waldo", - "Version": "0.4.0", + "Version": "0.5.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "035fba89d0c86e2113120f93301b98ad", "Requirements": [ + "R", "cli", "diffobj", "fansi", "glue", + "methods", "rematch2", "rlang", "tibble" - ] + ], + "Hash": "c7d3fd6d29ab077cbac8f0e2751449e6" }, "whisker": { "Package": "whisker", - "Version": "0.4", + "Version": "0.4.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "ca970b96d894e90397ed20637a0c1bbe", - "Requirements": [] + "Hash": "c6abfa47a46d281a7d5159d0a8891e88" }, "withr": { "Package": "withr", - "Version": "2.5.0", + "Version": "3.0.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "c0e49a9760983e81e55cdd9be92e7182", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics" + ], + "Hash": "07909200e8bbe90426fbfeb73e1e27aa" }, "xfun": { "Package": "xfun", - "Version": "0.32", + "Version": "0.46", "Source": "Repository", "Repository": "CRAN", 
- "Hash": "0498af3034691dde715dcd86198efe75", - "Requirements": [] + "Requirements": [ + "grDevices", + "stats", + "tools" + ], + "Hash": "00ce32f398db0415dde61abfef11300c" }, "yaml": { "Package": "yaml", - "Version": "2.3.5", + "Version": "2.3.9", "Source": "Repository", "Repository": "CRAN", - "Hash": "458bb38374d73bf83b1bb85e353da200", - "Requirements": [] + "Hash": "9cb28d11799d93c953f852083d55ee9e" }, "yulab.utils": { "Package": "yulab.utils", - "Version": "0.0.5", + "Version": "0.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "5727ef3682ea54af05114ef993b99812", - "Requirements": [] + "Requirements": [ + "cli", + "digest", + "fs", + "memoise", + "rlang", + "stats", + "tools", + "utils" + ], + "Hash": "60ee2aaa179dc282e9fa7367bad76e89" }, "zip": { "Package": "zip", - "Version": "2.2.0", + "Version": "2.3.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "c7eef2996ac270a18c2715c997a727c5", - "Requirements": [] + "Hash": "fcc4bd8e6da2d2011eb64a5e5cc685ab" } } } diff --git a/renv/.gitignore b/renv/.gitignore index 275e4ca3..22a0d01d 100644 --- a/renv/.gitignore +++ b/renv/.gitignore @@ -1,3 +1,4 @@ +sandbox/ library/ local/ cellar/ diff --git a/renv/activate.R b/renv/activate.R index 72c0818a..d13f9932 100644 --- a/renv/activate.R +++ b/renv/activate.R @@ -2,10 +2,28 @@ local({ # the requested version of renv - version <- "0.15.5" + version <- "1.0.7" + attr(version, "sha") <- NULL # the project directory - project <- getwd() + project <- Sys.getenv("RENV_PROJECT") + if (!nzchar(project)) + project <- getwd() + + # use start-up diagnostics if enabled + diagnostics <- Sys.getenv("RENV_STARTUP_DIAGNOSTICS", unset = "FALSE") + if (diagnostics) { + start <- Sys.time() + profile <- tempfile("renv-startup-", fileext = ".Rprof") + utils::Rprof(profile) + on.exit({ + utils::Rprof(NULL) + elapsed <- signif(difftime(Sys.time(), start, units = "auto"), digits = 2L) + writeLines(sprintf("- renv took %s to run the autoloader.", format(elapsed))) + writeLines(sprintf("- Profile: %s", profile)) + print(utils::summaryRprof(profile)) + }, add = TRUE) + } # figure out whether the autoloader is enabled enabled <- local({ @@ -15,6 +33,14 @@ local({ if (!is.null(override)) return(override) + # if we're being run in a context where R_LIBS is already set, + # don't load -- presumably we're being run as a sub-process and + # the parent process has already set up library paths for us + rcmd <- Sys.getenv("R_CMD", unset = NA) + rlibs <- Sys.getenv("R_LIBS", unset = NA) + if (!is.na(rlibs) && !is.na(rcmd)) + return(FALSE) + # next, check environment variables # TODO: prefer using the configuration one in the future envvars <- c( @@ -34,9 +60,22 @@ local({ }) - if (!enabled) + # bail if we're not enabled + if (!enabled) { + + # if we're not enabled, we might still need to manually load + # the user profile here + profile <- Sys.getenv("R_PROFILE_USER", unset = "~/.Rprofile") + if (file.exists(profile)) { + cfg <- Sys.getenv("RENV_CONFIG_USER_PROFILE", unset = "TRUE") + if (tolower(cfg) %in% c("true", "t", "1")) + sys.source(profile, envir = globalenv()) + } + return(FALSE) + } + # avoid recursion if (identical(getOption("renv.autoloader.running"), TRUE)) { warning("ignoring recursive attempt to run renv autoloader") @@ -60,21 +99,90 @@ local({ # load bootstrap tools `%||%` <- function(x, y) { - if (is.environment(x) || length(x)) x else y + if (is.null(x)) y else x + } + + catf <- function(fmt, ..., appendLF = TRUE) { + + quiet <- getOption("renv.bootstrap.quiet", default = FALSE) + if 
(quiet) + return(invisible()) + + msg <- sprintf(fmt, ...) + cat(msg, file = stdout(), sep = if (appendLF) "\n" else "") + + invisible(msg) + + } + + header <- function(label, + ..., + prefix = "#", + suffix = "-", + n = min(getOption("width"), 78)) + { + label <- sprintf(label, ...) + n <- max(n - nchar(label) - nchar(prefix) - 2L, 8L) + if (n <= 0) + return(paste(prefix, label)) + + tail <- paste(rep.int(suffix, n), collapse = "") + paste0(prefix, " ", label, " ", tail) + + } + + heredoc <- function(text, leave = 0) { + + # remove leading, trailing whitespace + trimmed <- gsub("^\\s*\\n|\\n\\s*$", "", text) + + # split into lines + lines <- strsplit(trimmed, "\n", fixed = TRUE)[[1L]] + + # compute common indent + indent <- regexpr("[^[:space:]]", lines) + common <- min(setdiff(indent, -1L)) - leave + paste(substring(lines, common), collapse = "\n") + + } + + startswith <- function(string, prefix) { + substring(string, 1, nchar(prefix)) == prefix } bootstrap <- function(version, library) { + friendly <- renv_bootstrap_version_friendly(version) + section <- header(sprintf("Bootstrapping renv %s", friendly)) + catf(section) + # attempt to download renv - tarball <- tryCatch(renv_bootstrap_download(version), error = identity) - if (inherits(tarball, "error")) - stop("failed to download renv ", version) + catf("- Downloading renv ... ", appendLF = FALSE) + withCallingHandlers( + tarball <- renv_bootstrap_download(version), + error = function(err) { + catf("FAILED") + stop("failed to download:\n", conditionMessage(err)) + } + ) + catf("OK") + on.exit(unlink(tarball), add = TRUE) # now attempt to install - status <- tryCatch(renv_bootstrap_install(version, tarball, library), error = identity) - if (inherits(status, "error")) - stop("failed to install renv ", version) + catf("- Installing renv ... 
", appendLF = FALSE) + withCallingHandlers( + status <- renv_bootstrap_install(version, tarball, library), + error = function(err) { + catf("FAILED") + stop("failed to install:\n", conditionMessage(err)) + } + ) + catf("OK") + + # add empty line to break up bootstrapping from normal output + catf("") + return(invisible()) } renv_bootstrap_tests_running <- function() { @@ -83,28 +191,32 @@ local({ renv_bootstrap_repos <- function() { + # get CRAN repository + cran <- getOption("renv.repos.cran", "https://cloud.r-project.org") + # check for repos override repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA) - if (!is.na(repos)) + if (!is.na(repos)) { + + # check for RSPM; if set, use a fallback repository for renv + rspm <- Sys.getenv("RSPM", unset = NA) + if (identical(rspm, repos)) + repos <- c(RSPM = rspm, CRAN = cran) + return(repos) + } + # check for lockfile repositories repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity) if (!inherits(repos, "error") && length(repos)) return(repos) - # if we're testing, re-use the test repositories - if (renv_bootstrap_tests_running()) - return(getOption("renv.tests.repos")) - # retrieve current repos repos <- getOption("repos") # ensure @CRAN@ entries are resolved - repos[repos == "@CRAN@"] <- getOption( - "renv.repos.cran", - "https://cloud.r-project.org" - ) + repos[repos == "@CRAN@"] <- cran # add in renv.bootstrap.repos if set default <- c(FALLBACK = "https://cloud.r-project.org") @@ -143,33 +255,34 @@ local({ renv_bootstrap_download <- function(version) { - # if the renv version number has 4 components, assume it must - # be retrieved via github - nv <- numeric_version(version) - components <- unclass(nv)[[1]] - - # if this appears to be a development version of 'renv', we'll - # try to restore from github - dev <- length(components) == 4L - - # begin collecting different methods for finding renv - methods <- c( - renv_bootstrap_download_tarball, - if (dev) - renv_bootstrap_download_github - else c( - renv_bootstrap_download_cran_latest, - renv_bootstrap_download_cran_archive + sha <- attr(version, "sha", exact = TRUE) + + methods <- if (!is.null(sha)) { + + # attempting to bootstrap a development version of renv + c( + function() renv_bootstrap_download_tarball(sha), + function() renv_bootstrap_download_github(sha) ) - ) + + } else { + + # attempting to bootstrap a release version of renv + c( + function() renv_bootstrap_download_tarball(version), + function() renv_bootstrap_download_cran_latest(version), + function() renv_bootstrap_download_cran_archive(version) + ) + + } for (method in methods) { - path <- tryCatch(method(version), error = identity) + path <- tryCatch(method(), error = identity) if (is.character(path) && file.exists(path)) return(path) } - stop("failed to download renv ", version) + stop("All download methods failed") } @@ -185,43 +298,75 @@ local({ if (fixup) mode <- "w+b" - utils::download.file( + args <- list( url = url, destfile = destfile, mode = mode, quiet = TRUE ) + if ("headers" %in% names(formals(utils::download.file))) + args$headers <- renv_bootstrap_download_custom_headers(url) + + do.call(utils::download.file, args) + } - renv_bootstrap_download_cran_latest <- function(version) { + renv_bootstrap_download_custom_headers <- function(url) { - spec <- renv_bootstrap_download_cran_latest_find(version) + headers <- getOption("renv.download.headers") + if (is.null(headers)) + return(character()) + + if (!is.function(headers)) + stopf("'renv.download.headers' is not a function") - message("* 
Downloading renv ", version, " ... ", appendLF = FALSE) + headers <- headers(url) + if (length(headers) == 0L) + return(character()) + if (is.list(headers)) + headers <- unlist(headers, recursive = FALSE, use.names = TRUE) + + ok <- + is.character(headers) && + is.character(names(headers)) && + all(nzchar(names(headers))) + + if (!ok) + stop("invocation of 'renv.download.headers' did not return a named character vector") + + headers + + } + + renv_bootstrap_download_cran_latest <- function(version) { + + spec <- renv_bootstrap_download_cran_latest_find(version) type <- spec$type repos <- spec$repos - info <- tryCatch( - utils::download.packages( - pkgs = "renv", - destdir = tempdir(), - repos = repos, - type = type, - quiet = TRUE - ), + baseurl <- utils::contrib.url(repos = repos, type = type) + ext <- if (identical(type, "source")) + ".tar.gz" + else if (Sys.info()[["sysname"]] == "Windows") + ".zip" + else + ".tgz" + name <- sprintf("renv_%s%s", version, ext) + url <- paste(baseurl, name, sep = "/") + + destfile <- file.path(tempdir(), name) + status <- tryCatch( + renv_bootstrap_download_impl(url, destfile), condition = identity ) - if (inherits(info, "condition")) { - message("FAILED") + if (inherits(status, "condition")) return(FALSE) - } # report success and return - message("OK (downloaded ", type, ")") - info[1, 2] + destfile } @@ -277,8 +422,6 @@ local({ urls <- file.path(repos, "src/contrib/Archive/renv", name) destfile <- file.path(tempdir(), name) - message("* Downloading renv ", version, " ... ", appendLF = FALSE) - for (url in urls) { status <- tryCatch( @@ -286,14 +429,11 @@ local({ condition = identity ) - if (identical(status, 0L)) { - message("OK") + if (identical(status, 0L)) return(destfile) - } } - message("FAILED") return(FALSE) } @@ -307,8 +447,7 @@ local({ return() # allow directories - info <- file.info(tarball, extra_cols = FALSE) - if (identical(info$isdir, TRUE)) { + if (dir.exists(tarball)) { name <- sprintf("renv_%s.tar.gz", version) tarball <- file.path(tarball, name) } @@ -317,7 +456,7 @@ local({ if (!file.exists(tarball)) { # let the user know we weren't able to honour their request - fmt <- "* RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." + fmt <- "- RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." msg <- sprintf(fmt, tarball) warning(msg) @@ -326,10 +465,7 @@ local({ } - fmt <- "* Bootstrapping with tarball at path '%s'." - msg <- sprintf(fmt, tarball) - message(msg) - + catf("- Using local tarball '%s'.", tarball) tarball } @@ -356,8 +492,6 @@ local({ on.exit(do.call(base::options, saved), add = TRUE) } - message("* Downloading renv ", version, " from GitHub ... ", appendLF = FALSE) - url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version) name <- sprintf("renv_%s.tar.gz", version) destfile <- file.path(tempdir(), name) @@ -367,26 +501,105 @@ local({ condition = identity ) - if (!identical(status, 0L)) { - message("FAILED") + if (!identical(status, 0L)) return(FALSE) - } - message("OK") + renv_bootstrap_download_augment(destfile) + return(destfile) } + # Add Sha to DESCRIPTION. This is stop gap until #890, after which we + # can use renv::install() to fully capture metadata. 
+ renv_bootstrap_download_augment <- function(destfile) { + sha <- renv_bootstrap_git_extract_sha1_tar(destfile) + if (is.null(sha)) { + return() + } + + # Untar + tempdir <- tempfile("renv-github-") + on.exit(unlink(tempdir, recursive = TRUE), add = TRUE) + untar(destfile, exdir = tempdir) + pkgdir <- dir(tempdir, full.names = TRUE)[[1]] + + # Modify description + desc_path <- file.path(pkgdir, "DESCRIPTION") + desc_lines <- readLines(desc_path) + remotes_fields <- c( + "RemoteType: github", + "RemoteHost: api.github.com", + "RemoteRepo: renv", + "RemoteUsername: rstudio", + "RemotePkgRef: rstudio/renv", + paste("RemoteRef: ", sha), + paste("RemoteSha: ", sha) + ) + writeLines(c(desc_lines[desc_lines != ""], remotes_fields), con = desc_path) + + # Re-tar + local({ + old <- setwd(tempdir) + on.exit(setwd(old), add = TRUE) + + tar(destfile, compression = "gzip") + }) + invisible() + } + + # Extract the commit hash from a git archive. Git archives include the SHA1 + # hash as the comment field of the tarball pax extended header + # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) + # For GitHub archives this should be the first header after the default one + # (512 byte) header. + renv_bootstrap_git_extract_sha1_tar <- function(bundle) { + + # open the bundle for reading + # We use gzcon for everything because (from ?gzcon) + # > Reading from a connection which does not supply a 'gzip' magic + # > header is equivalent to reading from the original connection + conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) + on.exit(close(conn)) + + # The default pax header is 512 bytes long and the first pax extended header + # with the comment should be 51 bytes long + # `52 comment=` (11 chars) + 40 byte SHA1 hash + len <- 0x200 + 0x33 + res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) + + if (grepl("^52 comment=", res)) { + sub("52 comment=", "", res) + } else { + NULL + } + } + renv_bootstrap_install <- function(version, tarball, library) { # attempt to install it into project library - message("* Installing renv ", version, " ... 
", appendLF = FALSE) dir.create(library, showWarnings = FALSE, recursive = TRUE) + output <- renv_bootstrap_install_impl(library, tarball) + + # check for successful install + status <- attr(output, "status") + if (is.null(status) || identical(status, 0L)) + return(status) + + # an error occurred; report it + header <- "installation of renv failed" + lines <- paste(rep.int("=", nchar(header)), collapse = "") + text <- paste(c(header, lines, output), collapse = "\n") + stop(text) + + } + + renv_bootstrap_install_impl <- function(library, tarball) { # invoke using system2 so we can capture and report output bin <- R.home("bin") exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R" - r <- file.path(bin, exe) + R <- file.path(bin, exe) args <- c( "--vanilla", "CMD", "INSTALL", "--no-multiarch", @@ -394,19 +607,7 @@ local({ shQuote(path.expand(tarball)) ) - output <- system2(r, args, stdout = TRUE, stderr = TRUE) - message("Done!") - - # check for successful install - status <- attr(output, "status") - if (is.numeric(status) && !identical(status, 0L)) { - header <- "Error installing renv:" - lines <- paste(rep.int("=", nchar(header)), collapse = "") - text <- c(header, lines, output) - writeLines(text, con = stderr()) - } - - status + system2(R, args, stdout = TRUE, stderr = TRUE) } @@ -447,6 +648,9 @@ local({ # if the user has requested an automatic prefix, generate it auto <- Sys.getenv("RENV_PATHS_PREFIX_AUTO", unset = NA) + if (is.na(auto) && getRversion() >= "4.4.0") + auto <- "TRUE" + if (auto %in% c("TRUE", "True", "true", "1")) return(renv_bootstrap_platform_prefix_auto()) @@ -616,34 +820,61 @@ local({ } - renv_bootstrap_validate_version <- function(version) { + renv_bootstrap_validate_version <- function(version, description = NULL) { + + # resolve description file + # + # avoid passing lib.loc to `packageDescription()` below, since R will + # use the loaded version of the package by default anyhow. 
note that + # this function should only be called after 'renv' is loaded + # https://github.com/rstudio/renv/issues/1625 + description <- description %||% packageDescription("renv") + + # check whether requested version 'version' matches loaded version of renv + sha <- attr(version, "sha", exact = TRUE) + valid <- if (!is.null(sha)) + renv_bootstrap_validate_version_dev(sha, description) + else + renv_bootstrap_validate_version_release(version, description) - loadedversion <- utils::packageDescription("renv", fields = "Version") - if (version == loadedversion) + if (valid) return(TRUE) - # assume four-component versions are from GitHub; three-component - # versions are from CRAN - components <- strsplit(loadedversion, "[.-]")[[1]] - remote <- if (length(components) == 4L) - paste("rstudio/renv", loadedversion, sep = "@") + # the loaded version of renv doesn't match the requested version; + # give the user instructions on how to proceed + dev <- identical(description[["RemoteType"]], "github") + remote <- if (dev) + paste("rstudio/renv", description[["RemoteSha"]], sep = "@") else - paste("renv", loadedversion, sep = "@") + paste("renv", description[["Version"]], sep = "@") - fmt <- paste( - "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.", - "Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", - "Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", - sep = "\n" + # display both loaded version + sha if available + friendly <- renv_bootstrap_version_friendly( + version = description[["Version"]], + sha = if (dev) description[["RemoteSha"]] ) - msg <- sprintf(fmt, loadedversion, version, remote) - warning(msg, call. = FALSE) + fmt <- heredoc(" + renv %1$s was loaded from project library, but this project is configured to use renv %2$s. + - Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile. + - Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library. 
+ ") + catf(fmt, friendly, renv_bootstrap_version_friendly(version), remote) FALSE } + renv_bootstrap_validate_version_dev <- function(version, description) { + expected <- description[["RemoteSha"]] + is.character(expected) && startswith(expected, version) + } + + renv_bootstrap_validate_version_release <- function(version, description) { + expected <- description[["Version"]] + is.character(expected) && identical(expected, version) + } + renv_bootstrap_hash_text <- function(text) { hashfile <- tempfile("renv-hash-") @@ -663,6 +894,12 @@ local({ # warn if the version of renv loaded does not match renv_bootstrap_validate_version(version) + # execute renv load hooks, if any + hooks <- getHook("renv::autoload") + for (hook in hooks) + if (is.function(hook)) + tryCatch(hook(), error = warnify) + # load the project renv::load(project) @@ -678,7 +915,7 @@ local({ return(profile) # check for a profile file (nothing to do if it doesn't exist) - path <- renv_bootstrap_paths_renv("profile", profile = FALSE) + path <- renv_bootstrap_paths_renv("profile", profile = FALSE, project = project) if (!file.exists(path)) return(NULL) @@ -802,12 +1039,78 @@ local({ } + renv_bootstrap_version_friendly <- function(version, shafmt = NULL, sha = NULL) { + sha <- sha %||% attr(version, "sha", exact = TRUE) + parts <- c(version, sprintf(shafmt %||% " [sha: %s]", substring(sha, 1L, 7L))) + paste(parts, collapse = "") + } + + renv_bootstrap_exec <- function(project, libpath, version) { + if (!renv_bootstrap_load(project, libpath, version)) + renv_bootstrap_run(version, libpath) + } + + renv_bootstrap_run <- function(version, libpath) { + + # perform bootstrap + bootstrap(version, libpath) + + # exit early if we're just testing bootstrap + if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) + return(TRUE) + + # try again to load + if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { + return(renv::load(project = getwd())) + } + + # failed to download or load renv; warn the user + msg <- c( + "Failed to find an renv installation: the project will not be loaded.", + "Use `renv::activate()` to re-initialize the project." + ) + + warning(paste(msg, collapse = "\n"), call. 
= FALSE) + + } renv_json_read <- function(file = NULL, text = NULL) { - text <- paste(text %||% read(file), collapse = "\n") + jlerr <- NULL + + # if jsonlite is loaded, use that instead + if ("jsonlite" %in% loadedNamespaces()) { + + json <- tryCatch(renv_json_read_jsonlite(file, text), error = identity) + if (!inherits(json, "error")) + return(json) + + jlerr <- json + + } + + # otherwise, fall back to the default JSON reader + json <- tryCatch(renv_json_read_default(file, text), error = identity) + if (!inherits(json, "error")) + return(json) + + # report an error + if (!is.null(jlerr)) + stop(jlerr) + else + stop(json) + + } + + renv_json_read_jsonlite <- function(file = NULL, text = NULL) { + text <- paste(text %||% readLines(file, warn = FALSE), collapse = "\n") + jsonlite::fromJSON(txt = text, simplifyVector = FALSE) + } + + renv_json_read_default <- function(file = NULL, text = NULL) { # find strings in the JSON + text <- paste(text %||% readLines(file, warn = FALSE), collapse = "\n") pattern <- '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]' locs <- gregexpr(pattern, text, perl = TRUE)[[1]] @@ -838,8 +1141,9 @@ local({ # transform the JSON into something the R parser understands transformed <- replaced - transformed <- gsub("[[{]", "list(", transformed) - transformed <- gsub("[]}]", ")", transformed) + transformed <- gsub("{}", "`names<-`(list(), character())", transformed, fixed = TRUE) + transformed <- gsub("[[{]", "list(", transformed, perl = TRUE) + transformed <- gsub("[]}]", ")", transformed, perl = TRUE) transformed <- gsub(":", "=", transformed, fixed = TRUE) text <- paste(transformed, collapse = "\n") @@ -854,14 +1158,14 @@ local({ map <- as.list(map) # remap strings in object - remapped <- renv_json_remap(json, map) + remapped <- renv_json_read_remap(json, map) # evaluate eval(remapped, envir = baseenv()) } - renv_json_remap <- function(json, map) { + renv_json_read_remap <- function(json, map) { # fix names if (!is.null(names(json))) { @@ -888,7 +1192,7 @@ local({ # recurse if (is.recursive(json)) { for (i in seq_along(json)) { - json[i] <- list(renv_json_remap(json[[i]], map)) + json[i] <- list(renv_json_read_remap(json[[i]], map)) } } @@ -908,35 +1212,9 @@ local({ # construct full libpath libpath <- file.path(root, prefix) - # attempt to load - if (renv_bootstrap_load(project, libpath, version)) - return(TRUE) - - # load failed; inform user we're about to bootstrap - prefix <- paste("# Bootstrapping renv", version) - postfix <- paste(rep.int("-", 77L - nchar(prefix)), collapse = "") - header <- paste(prefix, postfix) - message(header) - - # perform bootstrap - bootstrap(version, libpath) - - # exit early if we're just testing bootstrap - if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) - return(TRUE) - - # try again to load - if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { - message("* Successfully installed and loaded renv ", version, ".") - return(renv::load()) - } - - # failed to download or load renv; warn the user - msg <- c( - "Failed to find an renv installation: the project will not be loaded.", - "Use `renv::activate()` to re-initialize the project." - ) + # run bootstrap code + renv_bootstrap_exec(project, libpath, version) - warning(paste(msg, collapse = "\n"), call. 
= FALSE) + invisible() }) diff --git a/renv/settings.json b/renv/settings.json new file mode 100644 index 00000000..b70deb09 --- /dev/null +++ b/renv/settings.json @@ -0,0 +1,19 @@ +{ + "bioconductor.version": "3.15", + "external.libraries": [], + "ignored.packages": [], + "package.dependency.fields": [ + "Imports", + "Depends", + "LinkingTo" + ], + "ppm.enabled": null, + "ppm.ignored.urls": [], + "r.version": [], + "snapshot.type": "implicit", + "use.cache": true, + "vcs.ignore.cellar": true, + "vcs.ignore.library": true, + "vcs.ignore.local": true, + "vcs.manage.ignores": true +} diff --git a/tests/testthat/test-tens.mproduct.R b/tests/testthat/test-tens.mproduct.R new file mode 100644 index 00000000..e74a04d2 --- /dev/null +++ b/tests/testthat/test-tens.mproduct.R @@ -0,0 +1,192 @@ +context("m-product utilities") +# bltodo: add tests for matrix inputs +# bltodo: run on linux system to test parallel algorithms + +#' @description Use for internal testing. +#' Performs a DCT-II transform using the stats::fft algorithm. Produces +#' identical output to the scipy.fft.dct implementation in Python. +#' @param vec A numeric real vector to transform. +#' @param ortho Logical, if TRUE the output is orthonormal. +#' @return A numeric vector representing the DCT-II of the input. +#' @seealso docs.scipy.org/doc/scipy/tutorial/fft.html +dctii_scipy_equivalent <- function(vec, ortho = TRUE) { + # https://scipy.github.io/devdocs/reference/generated/scipy.fftpack.dct.html + # https://stackoverflow.com/questions/11215162 + # NOTE: this is dtt output scaled by 2 + n <- length(vec) + p <- exp(complex(imaginary = pi / 2 / n) * (seq(2 * n) - 1)) + unscaled_res <- Re(stats::fft(c(vec, rev(vec))) / p)[1:n] + + if (ortho) { + return( + c( + sqrt(1 / (4 * n)) * unscaled_res[1], + sqrt(1 / (2 * n)) * tail(unscaled_res, n - 1) + ) + ) + } else { + return(unscaled_res) + } +} + +test_that( + "gsignal's dct2 is producing the same result as the Scipy algorithm", + code = { + test_vector <- array(c(19, 3, 6, 11)) + transforms <- dctii_m_transforms(length(test_vector)) + m <- transforms$m + expect_equal(m(test_vector), matrix(dctii_scipy_equivalent(test_vector))) + } +) + +test_that( + "facewise product works the same as the naive algorithm", + code = { + n <- 2 + p <- 4 + t <- 3 + set.seed(1) + test_tensor1 <- array(rnorm(n * p * t), dim = c(n, p, t)) + test_tensor2 <- ft(test_tensor1) + expected_result <- array(0, dim = c(n, n, t)) + for (i in 1:t) { + expected_result[, , i] <- test_tensor1[, , i] %*% test_tensor2[, , i] + } + expect_equal( + test_tensor1 %fp% test_tensor2, + expected_result + ) + } +) + +test_that( + "mode-3 product result matches naive nested for-loop algorithm", + code = { + n <- 2 + p <- 4 + t <- 3 + test_tensor1 <- array(1:(n * p * t), dim = c(2, 4, 3)) + m_mat <- gsignal::dctmtx(t) + expected_result <- array(0, dim = c(2, 4, 3)) + for (nn in 1:n) { + for (pp in 1:p) { + expected_result[nn, pp, ] <- m_mat %*% test_tensor1[nn, pp, ] + } + } + transforms <- dctii_m_transforms(t) + m <- transforms$m + expect_equal(m(test_tensor1), expected_result) + } +) + +test_that( + "different mode-3 product algorithms produce equivalent results", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + t <- dim(test_tensor1)[3] + transforms_default <- dctii_m_transforms(t, bpparam = NULL) + transforms_parallel <- dctii_m_transforms( + t, bpparam = BiocParallel::MulticoreParam() + ) + expect_equal( + transforms_default$m(test_tensor1), + # suppress warning "MulticoreParam() not supported on Windows, use + # 
SnowParam()" when running tests on Windows + suppressWarnings( + transforms_parallel$m(test_tensor1) + ) + ) + } +) + +test_that( + "forward and inverse dctii transforms invert each other", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + transforms <- dctii_m_transforms(dim(test_tensor1)[3]) + m <- transforms$m + minv <- transforms$minv + expect_equal(test_tensor1, minv(m(test_tensor1))) + expect_equal(test_tensor1, m(minv(test_tensor1))) + } +) + +test_that( + # note this test does tend to throw warning messages on Windows + "different binary facewise algorithms produce equivalent results", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + test_tensor2 <- array(1:36, dim = c(4, 3, 3)) + expect_equal( + facewise_product(test_tensor1, test_tensor2, bpparam = NULL), + # suppress warning "MulticoreParam() not supported on Windows, use + # SnowParam()" when running tests on Windows + suppressWarnings( + facewise_product( + test_tensor1, test_tensor2, + bpparam = BiocParallel::MulticoreParam() + ) + ) + ) + } +) + +test_that( + "cumulative multi input facewise product works as expected compared to + custom operator", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + test_tensor2 <- array(1:60, dim = c(4, 5, 3)) + test_tensor3 <- array(1:90, dim = c(5, 6, 3)) + fp12 <- facewise_product(test_tensor1, test_tensor2) + expected_cumulative_fp <- facewise_product(fp12, test_tensor3) + expect_equal( + facewise_product(test_tensor1, test_tensor2, test_tensor3), + expected_cumulative_fp + ) + expect_equal( + test_tensor1 %fp% test_tensor2 %fp% test_tensor3, + expected_cumulative_fp + ) + } +) + +test_that( + "cumulative multi input m product works as expected", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + test_tensor2 <- array(1:60, dim = c(4, 5, 3)) + test_tensor3 <- array(1:90, dim = c(5, 6, 3)) + mp12 <- m_product(test_tensor1, test_tensor2) + expected_cumulative_mp <- m_product(mp12, test_tensor3) + expect_equal( + m_product(test_tensor1, test_tensor2, test_tensor3), + expected_cumulative_mp + ) + } +) + +test_that( + "m_product() throws appropriate errors", + code = { + test_tensor1 <- array(1:24, dim = c(2, 4, 3)) + dummy_m <- function() 0 + # only defining m without minv + expect_error( + m_product(test_tensor1, m = dummy_m), + "If explicitly defined, both m and its inverse must be defined as + functions" + ) + # accidentally adding () to an input meaning it is not a callable + expect_error( + m_product(test_tensor1, m = dummy_m()), + "If explicitly defined, both m and its inverse must be defined as + functions" + ) + expect_error( + m_product(test_tensor1, m = dummy_m(), minv = dummy_m), + "If explicitly defined, both m and its inverse must be defined as + functions" + ) + } +) diff --git a/tests/testthat/test-tens.tpca.R b/tests/testthat/test-tens.tpca.R new file mode 100644 index 00000000..012d1447 --- /dev/null +++ b/tests/testthat/test-tens.tpca.R @@ -0,0 +1,17 @@ +context("tpca") +# bltodo: bpparam configuration + +test_that( + "basic tpca sense checks", + code = { + n <- 4 + p <- 5 + t <- 3 + ncomp_input <- 2 + test_tensor <- array(1:(n * p * t), dim = c(n, p, t)) + tpca_obj <- tpca(test_tensor, ncomp = ncomp_input) + expect_equal(length(tpca_obj$explained_variance), ncomp_input) + expect_equal(dim(tpca_obj$variates), c(n, ncomp_input)) + expect_equal(dim(tpca_obj$loadings), c(p, ncomp_input)) + } +) diff --git a/tests/testthat/test-tens.tpls.R b/tests/testthat/test-tens.tpls.R new file mode 100644 index 00000000..c1a46bb4 --- /dev/null +++ 
b/tests/testthat/test-tens.tpls.R @@ -0,0 +1,207 @@ +context("tpls") + +#' @description Unnames and ensures the top row in a matrix-type output is all +#' positive. This allows for comparison between pls results that are the same +#' but just differ by a negative sign due to svd solver or other implementation +#' detail. +.make_signs_consistent <- function(mat) { + mat <- unname(mat) + ncols <- dim(mat)[2] + for (i in seq_len(ncols)) { + if (mat[1, i] < 0) { + mat[, i] <- -mat[, i] + } + } + return(mat) +} + +test_that( + "tpls: tsvdm-tpls mode agrees with tpca", + code = { + # it actually takes a lot of coercing to make these two outputs comparable + # because their default configurations are aimed at completely different + # problems. that being said, this test here is probably still useful as a + # sense check. + # + # problem 1: squaring the singular values means that the k_t_flatten_sort + # compression is different and picks out a different compressed matrix. Sort + # order of the squared values is not necessarily the same. + # + # problem 2: loadings (and therefore projections) may differ in sign, think + # this is dependent on the svd solver implementation? + # + # problem 3: if you use centering (on for tpca and tpls by default), the + # faces of each tensor loses one rank, so only rank - 1 columns of each face + # in the loadings tensor ("v") will match. + # + # problem 4: tpls computes the svd on XtX, which is rank-deficient if X is + # non-square. this means that the last few columns of "v" are absolutely + # crap and should not be compared. these columns may not even exist in + # tpca depending on the input dimensions. + n <- 4 + p <- 5 + t <- 3 + k <- min(n, p) + ncomp_input <- 2 + + # this test fails if `test_tensor` is rank deficient, as is the case if we + # use array(1:24, dim = c(3, 4, 2)) + set.seed(1) + test_tensor <- array(rnorm(n * p * t, mean = 0, sd = 5), dim = c(n, p, t)) + + transforms <- dctii_m_transforms(t) + m <- transforms$m + minv <- transforms$minv + + tpca_obj <- tpca( + test_tensor, + ncomp = ncomp_input, + m = m, + minv = minv, + # turn off centering to get over problem 3 + center = FALSE, + # return full tensors to get over problem 1 + matrix_output = FALSE + ) + + tpls_obj <- tpls( + test_tensor, test_tensor, + ncomp = ncomp_input, + m = m, + minv = minv, + mode = "tsvdm", + # turn off centering to get over problem 3 + center = FALSE, + # return full tensors to get over problem 1 + matrix_output = FALSE + ) + + # compare elementwise absolute values to get over problem 2 + expect_equal( + abs(tpca_obj$loadings), + # only compare k columns to get over problem 4 + abs(tpls_obj$y_loadings[, 1:k, ]) + ) + + expect_equal( + abs(tpca_obj$variates), + abs(tpls_obj$y_projected) + ) + } +) + +test_that( + "canonical: tpls agrees with MixOmics pls", + code = { + n <- 4 + p <- 5 + q <- 7 + t <- 1 + k <- min(n, p, q) + ncomp_input <- 2 + + set.seed(1) + test_x <- array(rnorm(n * p * t, mean = 0, sd = 5), dim = c(n, p, t)) + test_y <- array(rnorm(n * q * t, mean = 0, sd = 3), dim = c(n, q, t)) + + # suppress the warning here "At least one study has less than 5 samples, + # mean centering might not do as expected" + suppressWarnings( + mixomics_pls <- pls( + test_x[, , 1], + test_y[, , 1], + ncomp = ncomp_input, + scale = FALSE, + mode = "canonical" + ) + ) + + transforms <- matrix_to_m_transforms(diag(1)) + tensor_pls <- tpls( + test_x, + test_y, + ncomp = ncomp_input, + m = transforms$m, + minv = transforms$minv, + mode = "canonical" + ) + + expect_equal( + 
.make_signs_consistent(mixomics_pls$loadings$X), + .make_signs_consistent(tensor_pls$x_loadings) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$loadings$Y), + .make_signs_consistent(tensor_pls$y_loadings) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$variates$X), + .make_signs_consistent(tensor_pls$x_projected) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$variates$Y), + .make_signs_consistent(tensor_pls$y_projected) + ) + } +) + +test_that( + "regression: tpls agrees with MixOmics pls", + code = { + n <- 4 + p <- 5 + q <- 7 + t <- 1 + k <- min(n, p, q) + ncomp_input <- 2 + + set.seed(1) + test_x <- array(rnorm(n * p * t, mean = 0, sd = 5), dim = c(n, p, t)) + test_y <- array(rnorm(n * q * t, mean = 0, sd = 3), dim = c(n, q, t)) + + # suppress the warning here "At least one study has less than 5 samples, + # mean centering might not do as expected" + suppressWarnings( + mixomics_pls <- pls( + test_x[, , 1], + test_y[, , 1], + ncomp = ncomp_input, + scale = FALSE, + mode = "regression" + ) + ) + + transforms <- matrix_to_m_transforms(diag(1)) + tensor_pls <- tpls( + test_x, + test_y, + ncomp = ncomp_input, + m = transforms$m, + minv = transforms$minv, + mode = "regression" + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$loadings$X), + .make_signs_consistent(tensor_pls$x_loadings) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$loadings$Y), + .make_signs_consistent(tensor_pls$y_loadings) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$variates$X), + .make_signs_consistent(tensor_pls$x_projected) + ) + + expect_equal( + .make_signs_consistent(mixomics_pls$variates$Y), + .make_signs_consistent(tensor_pls$y_projected) + ) + } +) diff --git a/tests/testthat/test-tens.tsvdm.R b/tests/testthat/test-tens.tsvdm.R new file mode 100644 index 00000000..e358598d --- /dev/null +++ b/tests/testthat/test-tens.tsvdm.R @@ -0,0 +1,37 @@ +context("tsvdm") +# bltodo: add tests for permutations of different configurations and input sizes +# like the `tred` library + +test_that( + "reconstructing original matrix from tsvdm", + code = { + test_tensor <- array(1:24, dim = c(2, 4, 3)) + tsvdm_decomposition <- tsvdm(test_tensor) + u <- tsvdm_decomposition$u + s <- tsvdm_decomposition$s + v <- tsvdm_decomposition$v + vt <- ft(v) + + expect_equal( + test_tensor, + m_product(u, s, vt) + ) + } +) + +test_that( + "reconstructing original matrix from tsvdm without transform", + code = { + test_tensor <- array(1:24, dim = c(2, 4, 3)) + tsvdm_decomposition <- tsvdm(test_tensor, transform = FALSE) + u <- tsvdm_decomposition$u + s <- tsvdm_decomposition$s + v <- tsvdm_decomposition$v + vt <- ft(v) + + expect_equal( + test_tensor, + facewise_product(u, s, vt) + ) + } +)
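
# ------------------------------------------------------------------------------
# Appendix (editor's note, not part of the patch): a minimal, standalone R
# sketch of the tensor API exercised by the new tests above. It assumes the
# package is installed with the new exports (tsvdm, m_product, ft, %fp%), and
# that tsvdm() and m_product() fall back to the same default tubal transform
# when none is supplied, exactly as the "reconstructing original matrix from
# tsvdm" test relies on. The tensor values are arbitrary.
# ------------------------------------------------------------------------------
# library(mixOmics)

set.seed(1)
x <- array(rnorm(2 * 4 * 3), dim = c(2, 4, 3))  # an n x p x t tensor

# facewise product via the exported operator: multiplies matching frontal
# slices, so ft(x) %fp% x gives a p x p x t tensor
xtx <- ft(x) %fp% x

# tensor SVD under the package's default transform
dec <- tsvdm(x)

# the decomposition should reconstruct x through the m-product,
# up to floating point error
x_hat <- m_product(dec$u, dec$s, ft(dec$v))
all.equal(x, x_hat)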