fixed export statements for functions
hechth committed May 3, 2023
1 parent 0f0bae4 commit b5a61ce
Showing 7 changed files with 42 additions and 16 deletions.
20 changes: 17 additions & 3 deletions NAMESPACE
@@ -1,7 +1,5 @@
# Generated by roxygen2: do not edit by hand

S3method(solve,a)
S3method(solve,sigma)
export(adaptive.bin)
export(adjust.time)
export(aggregate_by_rt)
@@ -16,6 +14,9 @@ export(compute_breaks_3)
export(compute_chromatographic_profile)
export(compute_clusters)
export(compute_clusters_simple)
export(compute_comb)
export(compute_corrected_features)
export(compute_curr_rec_with_enough_peaks)
export(compute_delta_rt)
export(compute_densities)
export(compute_dx)
@@ -27,33 +28,43 @@ export(compute_mass_density)
export(compute_mass_values)
export(compute_mu_sc_std)
export(compute_mz_sd)
export(compute_peaks_and_valleys)
export(compute_pks_vlys_rt)
export(compute_rectangle)
export(compute_rt_intervals_indices)
export(compute_scale)
export(compute_sel)
export(compute_start_bound)
export(compute_target_times)
export(compute_template)
export(compute_template_adjusted_rt)
export(compute_uniq_grp)
export(correct_time)
export(count_peaks)
export(create_aligned_feature_table)
export(create_output)
export(create_rows)
export(draw_rt_correction_plot)
export(draw_rt_normal_peaks)
export(duplicate.row.remove)
export(fill_missing_values)
export(filter_based_on_density)
export(find.turn.point)
export(find_local_maxima)
export(find_mz_match)
export(find_mz_tolerance)
export(find_optima)
export(get_custom_rt_tol)
export(get_features_in_rt_range)
export(get_mzrange_bound_indices)
export(get_num_workers)
export(get_rt_region_indices)
export(get_single_occurrence_mask)
export(get_times_to_use)
export(hybrid)
export(increment_counter)
export(interpol.area)
export(l2normalize)
export(label_val_to_keep)
export(load.lcms)
export(load_aligned_features)
@@ -69,18 +80,21 @@ export(plot_raw_profile_histogram)
export(plot_rt_profile)
export(predict_mz_break_indices)
export(predict_smoothed_rt)
export(prep.uv)
export(prep_uv)
export(preprocess_bandwidth)
export(preprocess_profile)
export(prof.to.features)
export(recover.weaker)
export(refine_selection)
export(remove_noise)
export(rev_cum_sum)
export(rm.ridge)
export(run_filter)
export(select_mz)
export(select_rt)
export(semi.sup)
export(solve_a)
export(solve_sigma)
export(sort_data)
export(span)
export(two.step.hybrid)
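Background on why the rename fixes the exports: roxygen2's `@export` tag applies an S3 heuristic, so a function whose name reads as generic.class for a known generic (here base::solve) is written to NAMESPACE as an S3method() registration rather than a plain export(); that is where the removed S3method(solve,a) and S3method(solve,sigma) lines came from. With underscored names the heuristic no longer triggers. A minimal standalone sketch (toy functions invented for illustration, not code from this repository):

#' @export
solve.a <- function(a, b, ...) {   # dotted name: roxygen2 emits S3method(solve, a)
  a + b
}

#' @export
solve_a <- function(a, b) {        # underscored name: roxygen2 emits export(solve_a)
  a + b
}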
5 changes: 5 additions & 0 deletions R/adjust.time.R
@@ -2,6 +2,7 @@
NULL
#> NULL

#' @export
compute_comb <- function(template_features, features) {
combined <- dplyr::bind_rows(
template_features,
@@ -11,6 +12,7 @@ compute_comb <- function(template_features, features) {
return(combined)
}

#' @export
compute_sel <- function(combined, mz_tol_relative, rt_tol_relative) {
l <- nrow(combined)
sel <- which(combined$mz[2:l] - combined$mz[1:(l - 1)] <
@@ -20,6 +22,7 @@ compute_sel <- function(combined, mz_tol_relative, rt_tol_relative) {
return(sel)
}

#' @export
compute_template_adjusted_rt <- function(combined, sel, j) {
all_features <- cbind(combined$rt[sel], combined$rt[sel + 1])
flip_indices <- which(combined$sample_id[sel] == j)
@@ -34,6 +37,7 @@ compute_template_adjusted_rt <- function(combined, sel, j) {
return(all_features)
}

#' @export
compute_corrected_features <- function(features, delta_rt, avg_time) {
features <- features[order(features$rt, features$mz), ]
corrected <- features$rt
@@ -58,6 +62,7 @@ compute_corrected_features <- function(features, delta_rt, avg_time) {
return(features)
}

#' @export
fill_missing_values <- function(orig.feature, this.feature) {
missing_values <- which(is.na(this.feature$rt))
for (i in missing_values) {
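With `@export` on these helpers they become visible outside the package namespace, so they can be unit-tested directly. A hedged sketch of such a test (hypothetical, not part of this commit; it assumes the package has been loaded with devtools::load_all() and that compute_comb() does no more than row-bind, and possibly reorder, the two feature tables):

library(testthat)
library(tibble)

test_that("compute_comb keeps every feature from both tables", {
  # Invented toy features; the real tables carry more columns.
  template_features <- tibble(mz = c(100.1, 200.2), rt = c(10, 20), sample_id = 0L)
  features          <- tibble(mz = c(100.1, 200.3), rt = c(11, 21), sample_id = 1L)

  combined <- compute_comb(template_features, features)

  expect_equal(nrow(combined), nrow(template_features) + nrow(features))
})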
12 changes: 6 additions & 6 deletions R/prof.to.features.R
@@ -102,7 +102,7 @@ compute_gaussian_peak_shape <- function(rt_profile, bw, component_eliminate, BIC
#' @param sigma.1 left standard deviation of the gaussian curve
#' @param sigma.2 right standard deviation of the gaussian curve
#' @export
solve.a <- function(x, t, a, sigma.1, sigma.2) {
solve_a <- function(x, t, a, sigma.1, sigma.2) {
# This function is a part of bigauss.esti.EM and is not covered by any of test-cases
w <- x * (as.numeric(t < a) / sigma.1 + as.numeric(t >= a) / sigma.2)
return(sum(t * w) / sum(w))
@@ -115,7 +115,7 @@ solve.a <- function(x, t, a, sigma.1, sigma.2) {
#' @param t A vector of numerical values (rt).
#' @param a A vector of peak summits.
#' @export
prep.uv <- function(x, t, a) {
prep_uv <- function(x, t, a) {
# This function is a part of bigauss.esti.EM and is not covered by any of test-cases
temp <- (t - a)^2 * x
u <- sum(temp * as.numeric(t < a))
@@ -138,9 +138,9 @@ prep.uv <- function(x, t, a) {
#' \item standard deviation at the right side of the gaussian curve
#' }
#' @export
solve.sigma <- function(x, t, a) {
solve_sigma <- function(x, t, a) {
# This function is a part of bigauss.esti.EM and is not covered by any of test-cases
tt <- prep.uv(x, t, a)
tt <- prep_uv(x, t, a)
sigma.1 <- tt$u / tt$x.sum * ((tt$v / tt$u)^(1 / 3) + 1)
sigma.2 <- tt$v / tt$x.sum * ((tt$u / tt$v)^(1 / 3) + 1)
return(list(
@@ -192,11 +192,11 @@ bigauss.esti.EM <- function(t, x, max.iter = 50, epsilon = 0.005, do.plot = FALS
while ((change > epsilon) & (n.iter < max.iter)) {
a.old <- a.new
n.iter <- n.iter + 1
sigma <- solve.sigma(x, t, a.old)
sigma <- solve_sigma(x, t, a.old)
if (n.iter == 1) {
sigma[is.na(sigma)] <- as.numeric(sigma[which(!is.na(sigma))])[1] / 10
}
a.new <- solve.a(x, t, a.old, sigma$sigma.1, sigma$sigma.2)
a.new <- solve_a(x, t, a.old, sigma$sigma.1, sigma$sigma.2)
change <- abs(a.old - a.new)
}
d <- x
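The renamed solve_a() is small enough to reproduce standalone: it re-estimates the bi-Gaussian apex as an intensity-weighted mean of the retention times, with points left and right of the current apex weighted by 1/sigma.1 and 1/sigma.2 respectively. The body below mirrors the hunk above; the inputs are toy values invented for illustration:

solve_a <- function(x, t, a, sigma.1, sigma.2) {
  # Intensities left of the current apex get weight 1/sigma.1, right get 1/sigma.2.
  w <- x * (as.numeric(t < a) / sigma.1 + as.numeric(t >= a) / sigma.2)
  sum(t * w) / sum(w)
}

t <- seq(0, 10, by = 0.5)                       # toy retention times
x <- dnorm(t, mean = 4, sd = 1)                 # toy peak intensities
solve_a(x, t, a = 5, sigma.1 = 1, sigma.2 = 2)  # updated apex estimate, roughly 3.8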
9 changes: 9 additions & 0 deletions R/recover.weaker.R
@@ -58,6 +58,7 @@ compute_delta_rt <- function(times) {
#' @description x / sum(x)
#' @param x Data to normalize.
#' @return Normalized data.
#' @export
l2normalize <- function(x) {
x / sum(x)
}
@@ -157,6 +158,7 @@ compute_target_times <- function(aligned_rts,
#' checks which values only occur a single time.
#' @param values vector Values for which to compute the mask.
#' @return vector Boolean vector which is the mask of values occurring only once.
#' @export
get_single_occurrence_mask <- function(values) {
ttt <- table(values)
mask <- values %in% as.numeric(names(ttt)[ttt == 1])
@@ -275,6 +277,7 @@ get_rt_region_indices <- function(target_time, features, rt_tol) {
#' \item pks - vector - The data points at which the density peaks.
#' \item vlys - vector - The points in the data where the density is low
#' (forming a valley in the function).
#' @export
get_features_in_rt_range <- function(features, times, bw) {
time_curve <- times[between(times, min(features$rt), max(features$rt))]

@@ -297,6 +300,7 @@ get_features_in_rt_range <- function(features, times, bw) {
#' @param roi list Named list with vectors `pks` and `vlys`.
#' @param times vector Retention time values
#' @return vector Numbers of peaks within each region defined by a peak and the two valley points.
#' @export
count_peaks <- function(roi, times) {
num_peaks <- rep(0, length(roi$pks))

@@ -319,6 +323,7 @@ count_peaks <- function(roi, times) {
#' \item pks - vector - The data points at which the density peaks with at least `recover_min_count` peaks between the valley points.
#' \item vlys - vector - The points in the data where the density is low
#' (forming a valley in the function).
#' @export
compute_pks_vlys_rt <- function(features, times, bandwidth, target_rt, recover_min_count) {
roi <- get_features_in_rt_range(
features,
@@ -383,6 +388,7 @@ compute_mu_sc_std <- function(features, aver_diff) {
#' @param delta_rt vector Differences between consecutive retention time values (diff(times)).
#' @importFrom dplyr between
#' @return list Triplet of mz, label and intensity for the feature.
#' @export
compute_curr_rec_with_enough_peaks <- function(mz,
peak,
valleys,
@@ -448,6 +454,7 @@ compute_boundaries <- function(valley_points, peak) {
#' \item vlys - vector - The points in the data where the density is low
#' (forming a valley in the function).
#' }
#' @export
compute_peaks_and_valleys <- function(dens) {
turns <- find.turn.point(dens$y)
pks <- dens$x[turns$pks] # mz values with highest density
@@ -474,6 +481,7 @@ compute_peaks_and_valleys <- function(dens) {
#' @param min_bandwidth float Minimum bandwidth to use.
#' @param max_bandwidth float Maximum bandwidth to use.
#' @return tibble Tibble with `mz`, `rt` and `intensities` columns.
#' @export
compute_rectangle <- function(data_table,
aligned_feature_mz,
breaks,
@@ -602,6 +610,7 @@ compute_rectangle <- function(data_table,
#' @param rt_tol float Retention time tolerance.
#' @param mz_tol float Mz tolerance to use.
#' @return int Index of value in rectangle closest to `target_rt` and `aligned_mz`.
#' @export
refine_selection <- function(target_rt, rectangle, aligned_mz, rt_tol, mz_tol) {
if (!is.na(target_rt)) {
rt_term <- (rectangle$rt - target_rt)^2 / rt_tol^2
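Two of the newly exported helpers are compact enough to try in isolation; their bodies below are adapted from the hunks above (note that, as its roxygen description states, l2normalize() divides by sum(x)), and the example inputs are made up:

l2normalize <- function(x) {
  x / sum(x)
}

get_single_occurrence_mask <- function(values) {
  counts <- table(values)
  values %in% as.numeric(names(counts)[counts == 1])
}

l2normalize(c(1, 3, 6))                    # 0.1 0.3 0.6
get_single_occurrence_mask(c(1, 2, 2, 3))  # TRUE FALSE FALSE TRUE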
8 changes: 3 additions & 5 deletions R/utils.R
@@ -40,7 +40,6 @@ register_functions_to_cluster <- function(cluster) {
'compute_e_step',
'compute_start_bound',
'compute_end_bound',
'compute_bounds',
'compute_scale',
'span',
'compute_uniq_grp',
@@ -55,17 +54,16 @@ register_functions_to_cluster <- function(cluster) {
"create_output",
"comb",
'bigauss.esti.EM',
'solve.sigma',
'prep.uv',
'solve.a',
'solve_sigma',
'prep_uv',
'solve_a',
'correct_time',
'compute_comb',
'compute_sel',
'compute_template_adjusted_rt',
'compute_corrected_features',
'fill_missing_values',
'recover.weaker',
'load_file',
'get_custom_rt_tol',
'compute_target_times',
'predict_mz_break_indices',
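The character vector above lists the functions shipped to the parallel workers, which is why it must track the rename to solve_a, prep_uv and solve_sigma. A hedged sketch of how such registration is commonly implemented (the sketch helper and its use of parallel::clusterExport() are assumptions for illustration; the real register_functions_to_cluster() may differ):

library(parallel)

# Ship the named functions from the caller's environment to every worker.
register_functions_to_cluster_sketch <- function(cluster, fun_names) {
  parallel::clusterExport(cluster, varlist = fun_names, envir = parent.frame())
}

solve_a <- function(x, t, a, sigma.1, sigma.2) {
  w <- x * (as.numeric(t < a) / sigma.1 + as.numeric(t >= a) / sigma.2)
  sum(t * w) / sum(w)
}

cl <- makeCluster(2)
register_functions_to_cluster_sketch(cl, c("solve_a"))
clusterEvalQ(cl, exists("solve_a"))  # list(TRUE, TRUE): both workers see the function
stopCluster(cl)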
2 changes: 1 addition & 1 deletion tests/testthat/test-find.tol.time.R
@@ -17,5 +17,5 @@ test_that("compute_rt_tol_relative computes something", {
max.bins
)

expect_equal(actual, 1.0416)
expect_equal(actual, 1.04167, tolerance=0.001)
})
2 changes: 1 addition & 1 deletion tests/testthat/test-find_mz_tolerance.R
@@ -11,5 +11,5 @@ test_that("mz tolerance is found", {
do.plot = FALSE
)

expect_equal(mz_tol_relative, 0.0166409666685641)
expect_equal(mz_tol_relative, 0.01664097, tolerance=0.001)
})
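Both test tweaks follow the same pattern: the hard-coded expected value is shortened and an explicit tolerance is added, so the assertion no longer depends on every trailing digit of a floating-point result. With testthat's expect_equal(), tolerance acts as a relative difference for numbers of this magnitude, for example:

library(testthat)

# The two values differ by roughly 7e-5 in relative terms, well inside 0.001.
expect_equal(1.04167, 1.0416, tolerance = 0.001)

# A comparison of the same pair at the default tolerance would fail:
# expect_equal(1.04167, 1.0416)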
