Refactoring parsing/tokenization (#1537)
hadley authored Nov 17, 2023
1 parent 5741e99 commit 56c5d5a
Showing 2 changed files with 41 additions and 39 deletions.
35 changes: 18 additions & 17 deletions R/block.R
@@ -73,21 +73,20 @@ print.roxy_block <- function(x, ...) {
   cat_line(" ", obj[-1])
 }
 
-block_create <- function(tokens, call, srcref) {
-
-  pkgenv <- roxy_meta_get("env")
-  # This should only happen in our test cases
-  if (is.null(pkgenv)) pkgenv <- baseenv()
-  evalenv <- new.env(parent = pkgenv)
-  roxy_meta_set("evalenv", evalenv)
-  on.exit(roxy_meta_set("evalenv", NULL), add = TRUE)
+block_create <- function(call, srcref, tokens = c()) {
+  if (is_empty(tokens)) {
+    return(NULL)
+  }
 
   tags <- parse_tags(tokens)
-  if (length(tags) == 0) return()
+  if (length(tags) == 0) {
+    return(NULL)
+  }
 
-  roxy_block(tags,
+  roxy_block(
+    tags = tags,
     file = attr(srcref, "srcfile")$filename,
-    line = as.vector(srcref)[[1]],
+    line = srcref[[1]],
     call = call
   )
 }
@@ -206,15 +205,17 @@ block_replace_tags <- function(block, tags, values) {
 # parsing -----------------------------------------------------------------
 
 parse_tags <- function(tokens) {
+  # Set up evaluation environment for markdown
+  pkgenv <- roxy_meta_get("env") %||% baseenv()
+  evalenv <- new.env(parent = pkgenv)
+  roxy_meta_set("evalenv", evalenv)
+  on.exit(roxy_meta_set("evalenv", NULL), add = TRUE)
+
   markdown_activate(tokens)
 
   tokens <- parse_description(tokens)
-
-  out <- vector("list", length(tokens))
-  for (i in seq_along(tokens)) {
-    out[[i]] <- roxy_tag_parse(tokens[[i]])
-  }
-  compact(out)
+  tokens <- map(tokens, roxy_tag_parse)
+  compact(tokens)
 }
 
 #' @export
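For reference, %||% is the null-coalescing operator from rlang (also in base R since 4.4). A minimal sketch of its definition, shown here only to make the parse_tags() simplification explicit, not part of the commit:

`%||%` <- function(a, b) if (is.null(a)) b else a

# With it, the old three-line fallback from block_create()
#   pkgenv <- roxy_meta_get("env")
#   if (is.null(pkgenv)) pkgenv <- baseenv()
# collapses to the single line now in parse_tags():
#   pkgenv <- roxy_meta_get("env") %||% baseenv()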
45 changes: 23 additions & 22 deletions R/tokenize.R
@@ -1,31 +1,28 @@
 # Returns list of roxy_blocks
-tokenize_file <- function(file, srcref_path = NULL) {
-  lines <- read_lines(file)
+tokenize_file <- function(path, srcref_path = NULL) {
+  lines <- read_lines(path)
+  calls <- parse_lines(lines, srcref_path %||% path)
+  srcrefs <- utils::getSrcref(calls)
 
-  parsed <- parse(
-    text = lines,
-    keep.source = TRUE,
-    srcfile = srcfilecopy(srcref_path %||% file, lines, isFile = TRUE)
-  )
-  if (length(parsed) == 0)
-    return(list())
-
-  refs <- utils::getSrcref(parsed)
-
-  comment_refs <- comments(refs)
+  comment_refs <- comments(srcrefs)
   tokens <- lapply(comment_refs, tokenise_ref)
 
-  has_tokens <- !purrr::map_lgl(tokens, purrr::is_empty)
+  blocks <- map(seq_along(tokens), function(i) {
+    block_create(
+      call = calls[[i]],
+      srcref = srcrefs[[i]],
+      tokens = tokens[[i]]
+    )
+  })
+  compact(blocks)
+}
 
-  blocks <- purrr::pmap(
-    list(
-      call = as.list(parsed)[has_tokens],
-      srcref = refs[has_tokens],
-      tokens = tokens[has_tokens]
-    ),
-    block_create
+parse_lines <- function(lines, path) {
+  parse(
+    text = lines,
+    keep.source = TRUE,
+    srcfile = srcfilecopy(path, lines, isFile = TRUE)
   )
-  purrr::compact(blocks)
 }
 
 tokenise_ref <- function(x) {
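A minimal standalone sketch of the extracted parse_lines() helper, repeating its definition from the hunk above; the input lines and the file name "example.R" are hypothetical:

parse_lines <- function(lines, path) {
  parse(
    text = lines,
    keep.source = TRUE,
    srcfile = srcfilecopy(path, lines, isFile = TRUE)
  )
}

lines <- c("#' Add one to x.", "f <- function(x) x + 1")
calls <- parse_lines(lines, "example.R")  # hypothetical file name
utils::getSrcref(calls)                   # one srcref per top-level call, attributed to example.R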
@@ -38,6 +35,10 @@ tokenise_ref <- function(x) {

 # For each src ref, find the comment block preceding it
 comments <- function(refs) {
+  if (length(refs) == 0) {
+    return(list())
+  }
+
   srcfile <- attr(refs[[1]], "srcfile")
 
   # first_line, first_byte, last_line, last_byte
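Why the new guard in comments() matters: a file containing only comments (or nothing at all) parses to an empty expression list, so refs has length zero and refs[[1]] would error; the early return(list()) covers the case that tokenize_file() previously handled with its length(parsed) == 0 check. An illustrative sketch, not part of the commit:

calls <- parse(text = "# a comment, no code", keep.source = TRUE)
refs <- utils::getSrcref(calls)
length(refs)  # 0, so refs[[1]] would fail without the guard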
