Skip to content

Commit

Permalink
[ADAM-1783] Resolve check issues that block pushing to CRAN.
Browse files Browse the repository at this point in the history
Resolves #1783. Cleans up a host of documentation issues that would cause
warnings when submitting to CRAN.
  • Loading branch information
fnothaft committed Jan 3, 2018
1 parent 052b0ee commit 79ed153
Show file tree
Hide file tree
Showing 11 changed files with 438 additions and 164 deletions.
8 changes: 4 additions & 4 deletions adam-r/bdgenomics.adam/DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,17 @@ Title: R Frontend for Big Data Genomics/ADAM
Description: ADAM is a genomics analysis platform with specialized file formats built using Apache Avro, Apache Spark and Parquet.
Author: Big Data Genomics
Maintainer: Frank Austin Nothaft <fnothaft@alumni.stanford.edu>
Authors@R: c(person("Frank", "Nothaft", role = c("aut", "cre"),
Authors@R: c(person("Frank Austin", "Nothaft", role = c("aut", "cre"),
email = "fnothaft@alumni.stanford.edu"),
person(family = "Big Data Genomics", role = c("aut", "cph")))
License: Apache License (== 2.0)
URL: http://www.bdgenomics.org https://github.com/bigdatagenomics/adam
BugReports: https://github.com/bigdatagenomics/adam/issues
Imports:
methods
Depends:
R (>= 3.0),
methods,
SparkR (>= 2.1.0)
Depends:
R (>= 3.0)
Suggests:
testthat
Collate:
Expand Down
39 changes: 39 additions & 0 deletions adam-r/bdgenomics.adam/NAMESPACE
Original file line number Diff line number Diff line change
@@ -1,6 +1,40 @@
# Generated by roxygen2: do not edit by hand

export(ADAMContext)
export(aggregatedCoverage)
export(collapse)
export(countKmers)
export(coverage)
export(createADAMContext)
export(flankAdjacentFragments)
export(flatten)
export(loadAlignments)
export(loadContigFragments)
export(loadCoverage)
export(loadFeatures)
export(loadFragments)
export(loadGenotypes)
export(loadVariants)
export(markDuplicates)
export(pipe)
export(realignIndels)
export(recalibrateBaseQualities)
export(save)
export(saveAsParquet)
export(saveAsSam)
export(saveAsVcf)
export(sort)
export(sortLexicographically)
export(sortReadsByReferencePosition)
export(sortReadsByReferencePositionAndIndex)
export(toCoverage)
export(toDF)
export(toFeatures)
export(toFragments)
export(toReads)
export(toVariantContexts)
export(transform)
export(transmute)
exportClasses(ADAMContext)
exportClasses(AlignmentRecordRDD)
exportClasses(CoverageRDD)
Expand Down Expand Up @@ -45,3 +79,8 @@ exportMethods(toReads)
exportMethods(toVariantContexts)
exportMethods(transform)
exportMethods(transmute)
importFrom(SparkR,sparkR.callJMethod)
importFrom(SparkR,sparkR.callJStatic)
importFrom(SparkR,sparkR.newJObject)
importFrom(SparkR,sparkR.session)
importFrom(methods,new)
37 changes: 37 additions & 0 deletions adam-r/bdgenomics.adam/R/adam-context.R
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,32 @@ setOldClass("jobj")
# S4 class holding a single slot: a Java object handle to the backing
# JavaADAMContext on the JVM side of the SparkR bridge.
# "jobj" is the S3 class of SparkR's Java object references, registered
# for use in S4 slots via setOldClass("jobj") earlier in this file.
#' @description The ADAMContext provides helper methods for loading in genomic
#' data into a Spark RDD/Dataframe.
#' @slot jac Java object reference to the backing JavaADAMContext.
#'
#' @rdname ADAMContext
#'
#' @export
setClass("ADAMContext",
slots = list(jac = "jobj"))

#' Creates an ADAMContext by creating a SparkSession.
#'
#' Starts (or attaches to) a SparkSession via SparkR, then wraps it in an
#' ADAMContext.
#'
#' @return Returns an ADAMContext.
#'
#' @importFrom SparkR sparkR.session
#'
#' @export
createADAMContext <- function() {
  # sparkR.session() returns the active SparkSession, creating one if needed.
  session <- sparkR.session()
  ADAMContext(session)
}

#' Creates an ADAMContext from an existing SparkSession.
#'
#' @param ss The Spark Session to use to create the ADAMContext.
#' @return Returns an ADAMContext.
#'
#' @importFrom SparkR sparkR.callJMethod sparkR.newJObject
#' @importFrom methods new
#'
#' @export
ADAMContext <- function(ss) {
ssc = sparkR.callJMethod(ss, "sparkContext")
Expand All @@ -36,6 +58,7 @@ ADAMContext <- function(ss) {
new("ADAMContext", jac = jac)
}

#' @importFrom SparkR sparkR.callJStatic
javaStringency <- function(stringency) {
stringency <- sparkR.callJStatic("htsjdk.samtools.ValidationStringency",
"valueOf",
Expand All @@ -62,6 +85,8 @@ javaStringency <- function(stringency) {
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing reads.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadAlignments",
signature(ac = "ADAMContext", filePath = "character"),
Expand All @@ -85,6 +110,8 @@ setMethod("loadAlignments",
#' @param filePath The path to load the file from.
#' @return Returns an RDD containing sequence fragments.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadContigFragments",
signature(ac = "ADAMContext", filePath = "character"),
Expand All @@ -109,6 +136,8 @@ setMethod("loadContigFragments",
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing sequence fragments.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadFragments",
signature(ac = "ADAMContext", filePath = "character"),
Expand Down Expand Up @@ -141,6 +170,8 @@ setMethod("loadFragments",
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing features.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadFeatures",
signature(ac = "ADAMContext", filePath = "character"),
Expand Down Expand Up @@ -174,6 +205,8 @@ setMethod("loadFeatures",
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing coverage.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadCoverage",
signature(ac = "ADAMContext", filePath = "character"),
Expand All @@ -196,6 +229,8 @@ setMethod("loadCoverage",
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing genotypes.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadGenotypes",
signature(ac = "ADAMContext", filePath = "character"),
Expand All @@ -218,6 +253,8 @@ setMethod("loadGenotypes",
#' @param stringency The validation stringency to apply. Defaults to STRICT.
#' @return Returns an RDD containing variants.
#'
#' @importFrom SparkR sparkR.callJMethod
#'
#' @export
setMethod("loadVariants",
signature(ac = "ADAMContext", filePath = "character"),
Expand Down
Loading

0 comments on commit 79ed153

Please sign in to comment.