
Commit

Merge pull request #53 from m-jahn/dev
Update to v0.1.3
m-jahn authored Nov 5, 2024
2 parents 3a60127 + 34c5df2 commit f7ba8e4
Showing 35 changed files with 445 additions and 221 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
@@ -1,7 +1,7 @@
Package: WeightedTreemaps
Title: Generate and Plot Voronoi or Sunburst Treemaps from Hierarchical
Data
Version: 0.1.2
Version: 0.1.3
Authors@R: c(
person("Michael", "Jahn", , "jahn@mpusp.mpg.de", role = c("aut", "cre"),
comment = c(ORCID = "0000-0002-3913-153X")),
@@ -54,5 +54,5 @@ VignetteBuilder:
knitr
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.2.3
RoxygenNote: 7.3.2
SystemRequirements: C++17
9 changes: 9 additions & 0 deletions NEWS.md
@@ -1,5 +1,14 @@
# WeightedTreemaps 0.1.3

- fixed links to external packages
- several bug fixes and improvements to catch errors when parsing polygons
- added option to converge with different speed

# WeightedTreemaps 0.1.2

- updated README
- vignette: reduced example sizes to improve building time

# WeightedTreemaps 0.1.1

- The package was prepared for release on CRAN
78 changes: 51 additions & 27 deletions R/allocate.R
@@ -1,7 +1,7 @@
#' @importFrom stats rnorm
#' @importFrom dplyr %>%
#' @importFrom sf st_area
#'
#'
cellError <- function(a, target) {
normA <- a / sum(a)
diff <- abs(normA - target)
@@ -14,9 +14,9 @@ breaking <- function(
debug = FALSE,
error_tol,
prevError) {

if (max) {

# Stop when largest individual cell error is less than 1%
# (the default)
err <- cellError(a, target)
@@ -29,7 +29,7 @@
prevError <- err

} else {

normA <- a / sum(a)
diff <- abs(normA - target)
# Stop when *change* in *total* cell error is tiny
@@ -46,7 +46,7 @@
))
stopping <- abs(sum(diff) - prevError) < 0.001
prevError <- sum(diff)

}
list(
stopping = stopping,
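
A toy calculation (made-up areas and targets, not from this commit) makes the two criteria concrete:

a <- c(30, 50, 20)            # absolute cell areas
target <- c(0.25, 0.5, 0.25)  # target shares, summing to 1
diff <- abs(a / sum(a) - target)
max(diff)  # 0.05 -> the "max" branch compares this against error_tol
sum(diff)  # 0.10 -> the other branch watches the change of this total between iterations
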
@@ -59,16 +59,22 @@
# adjust by multiple of average absolute weights
# This avoids the problem of getting stuck at a tiny weight
# (and stabilizes the algorithm generally)
# difference to original implementation: adjustment of maximal
# difference to original implementation: adjustment of maximal
# step change of weights to prevent crashing of algorithm
adjustWeights <- function(w, a, target) {
adjustWeights <- function(w, a, target, convergence) {
# OPTION: avoid extreme scaling values -> squareroot function
# to buffer strong difference between computed area and target
# and to buffer the global weight increase
# these increase stability but also computation time:
normA <- a / sum(a)
scaling <- ((target - normA) / target)
scaling <- ifelse(scaling < -1, scaling/sqrt(abs(scaling)), scaling)
if (convergence == "slow") {
scaling <- ifelse(scaling < -1, -log10(abs(scaling)), scaling)
} else if (convergence == "intermediate") {
scaling <- ifelse(scaling < -1, -log(abs(scaling)), scaling)
} else if (convergence == "fast") {
scaling <- ifelse(scaling < -1, scaling/sqrt(abs(scaling)), scaling)
}
w + sqrt(mean(abs(w))) * scaling
}

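The three convergence modes differ only in how strongly they damp scaling values below -1, i.e. cells that came out much larger than their target. A standalone illustration of the damping curves (arbitrary example values):

scaling <- c(-100, -10, -2)
data.frame(
  slow         = -log10(abs(scaling)),        # strongest damping
  intermediate = -log(abs(scaling)),          # moderate damping
  fast         = scaling / sqrt(abs(scaling)) # equals -sqrt(abs(scaling))
)
#   slow intermediate   fast
#  -2.00        -4.61 -10.00
#  -1.00        -2.30  -3.16
#  -0.30        -0.69  -1.41
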
@@ -115,34 +121,52 @@ shiftWeights <- function(s, w) {
# just give up after 'maxIteration's
allocate <- function(
names, s, w, outer, target,
maxIteration,
maxIteration,
error_tol,
debug = FALSE,
convergence,
min_target = 0.01,
debug = FALSE,
debugCell = FALSE)
{
count <- 1
prevError <- 1


# check for extremely small cell size compared to theoretical average
target_fc <- target * length(target)
too_small <- target_fc < min_target
if (any(too_small)) {
message(paste0(
"Found extremely small cell (<", round(min_target * 100, 1), "% of average size);\n",
"inflating cell size to prevent failure when calculating polygons."
))
correction <- ifelse(
too_small,
min_target/length(target),
-min_target/length(target) * sum(too_small)/sum(!too_small)
)
target <- target + correction
}

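A quick numeric check of this correction, with made-up target shares: inflating the tiny cell is exactly balanced by deflating the others, so the shares still sum to 1.

target <- c(0.001, 0.299, 0.4, 0.3); min_target <- 0.01
too_small <- target * length(target) < min_target  # TRUE only for cell 1
correction <- ifelse(
  too_small,
  min_target / length(target),
  -min_target / length(target) * sum(too_small) / sum(!too_small)
)
sum(target + correction)  # 1
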
repeat {

# if all weights are identical the CGAL algorithm often fails
# in this case we introduce a bit of random variation
if (length(unique(w)) == 1) {
w <- w * rnorm(length(w), mean = 1, sd = 0.01)
}

# call to awv function, the additively weighted voronoi tesselation,
# wrapped within a trycatch statement to catch errors and start over
k <- tryCatch(awv(s, w, outer, debug, debugCell),
error = function(e) { print(e); NULL}
error = function(e) { message(e); NULL}
)
if (is.null(k)) {
return(NULL)
}
areas <- lapply(k, sf::st_area)

# if debug=TRUE, every iteration is drawn to the viewport
# this can be very time and resource consuming and should be used
# this can be very time and resource consuming and should be used
# with care. The result resembles the final treemap but is an overlay of
# many iterations
if (debug) {
@@ -156,7 +180,7 @@ allocate <- function(
lwd = 2, col = grey(0.5),
fill = grey(1, alpha=0.33)
)

info <-
rbind(
area = round(unlist(areas) / sum(unlist(areas)), 4),
@@ -167,31 +191,31 @@
colnames(info) <- names
print(info)
}

stop_cond <- breaking(
unlist(areas),
target,
unlist(areas),
target,
debug = debug,
error_tol = error_tol,
prevError = prevError)

# if stop condition is fulfilled, return result in form of
# list of polygons and metadata
if (count == maxIteration || stop_cond$stopping) {

res <- lapply(1:length(names), function(i) {
list(
name = names[i], poly = k[[i]],
site = c(s$x[[i]], s$y[[i]]),
weight = w[i], area = unlist(areas)[i],
site = c(s$x[[i]], s$y[[i]]),
weight = w[i], area = unlist(areas)[i],
target = target[i],
count = count)
}) %>% setNames(names)
return(res)

} else {
w <- adjustWeights(w, unlist(areas), target)

w <- adjustWeights(w, unlist(areas), target, convergence)
s <- shiftSites(s, k)
w <- shiftWeights(s, w)
}
2 changes: 1 addition & 1 deletion R/drawTreemap.R
@@ -23,7 +23,7 @@
#' Default is to use the lowest level cells for Voronoi treemaps and all levels
#' for sunburst treemaps.
#' @param color_palette (character) A character vector of colors used to fill cells.
#' The default is to use \code{\link{rainbow_hcl}} from package \code{colorspace}
#' The default is to use \code{\link[colorspace]{rainbow_hcl}} from
#' @param border_level (numeric) A numeric vector representing the hierarchical level that should be
used for drawing cell borders, or NULL to omit drawing borders. The default is
#' that all borders are drawn.
58 changes: 30 additions & 28 deletions R/tesselation.R
@@ -11,7 +11,7 @@
# Generate one iteration of the Additively Weighted Voronoi diagram
awv <- function(
s, w, region, debug = FALSE,
debugCell = FALSE)
debugCell = FALSE)
{
# combine X, Y coordinates and weights as input for
# C++ tesselation function
@@ -30,27 +30,29 @@ awv <- function(

tidyCell <-
function(cell, tolerance) {

# if cell touches the border at two points, we need to close it
# this is not necessary if cell touches border at 4 points (like a stripe)
if (sum(
cell$border$x %in% c(4000, -4000),
cell$border$x %in% c(4000, -4000),
cell$border$y %in% c(4000, -4000)) == 2
) {

closeCell(cell$border, cell$vertex, tol = tolerance)

poly <- closeCell(cell$border, cell$vertex, tol = tolerance)
} else {

# return a list of the polygon
list(
poly <- list(
x = cell$border$x,
y = cell$border$y,
end = "boundary"
)

if (sf::st_is_valid(convertCell(poly[1:2]))) {
return(poly)
} else {
message("Found invalid polygon (self-intersection)")
return(NULL)
}
}

}

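The validity check added in this branch catches exactly the self-intersection case named in the message. A minimal standalone example of that failure mode (convertCell() is internal, so plain sf geometries are used here instead):

library(sf)
# a "bowtie" ring crosses itself at (0.5, 0.5) and is therefore invalid
bowtie <- st_polygon(list(rbind(
  c(0, 0), c(1, 1), c(1, 0), c(0, 1), c(0, 0)
)))
st_is_valid(bowtie)  # FALSE
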
# SIDES
@@ -97,7 +99,7 @@ antiSide <- function(corner) {
closeClock <- function(x, y, start, end, scale = 2000) {
cornerX <- c(-2 * scale, 2 * scale, 2 * scale,-2 * scale)
cornerY <- c(2 * scale, 2 * scale,-2 * scale,-2 * scale)

side <- end
repeat {
corner <- clockCorner(side)
@@ -116,7 +118,7 @@ closeClock <- function(x, y, start, end, scale = 2000) {
closeAnti <- function(x, y, start, end, scale = 2000) {
cornerX <- c(-2 * scale, 2 * scale, 2 * scale,-2 * scale)
cornerY <- c(2 * scale, 2 * scale,-2 * scale,-2 * scale)

side <- end
repeat {
corner <- antiCorner(side)
@@ -155,40 +157,36 @@ closeCell <- function(cell, vertex, tol, scale = 2000) {
# If not, do second one (and check that vertex is "inside" that result!)
x <- cell$x
y <- cell$y

# ASSUME that both first and last vertices are on boundary!
N <- length(x)
startSide <- side(x[1], y[1])
endSide <- side(x[N], y[N])

# exit unless both end points lie on the boundary
if (length(startSide) != 1 | length(endSide) != 1) {
return(NULL)
}

# Start and end on same side
if (startSide == endSide) {

cell <- list(x = x, y = y)

if (sp::point.in.polygon(vertex[1], vertex[2],
cell$x, cell$y) == 0) {
cell$x, cell$y) == 0) {
boundRect <- to_sfpoly(list(
x = c(-2 * scale,-2 * scale, 2 * scale, 2 * scale),
y = c(-2 * scale, 2 * scale, 2 * scale,-2 * scale)
))
# "Subtract" smallCell from bound rect to get largeCell
cellPoly <- to_sfpoly(cell)
cellPoly <- sf::st_difference(boundRect, cellPoly)

pts <- to_coords(cellPoly)
cell <- to_coords(cellPoly)
if (sp::point.in.polygon(vertex[1], vertex[2],
cell$x, cell$y) == 0) {
cell$x, cell$y) == 0) {
stop("Failed to close cell")
}
}

} else {

cell <- closeClock(x, y, startSide, endSide)
if (sp::point.in.polygon(vertex[1], vertex[2],
cell$x, cell$y) == 0) {
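
Both branches rely on the same containment test. For reference, sp::point.in.polygon() returns 0 for a point outside the ring and 1 for a point inside, shown here on a toy unit square:

library(sp)
point.in.polygon(2.0, 2.0, c(0, 1, 1, 0), c(0, 0, 1, 1))  # 0 = outside
point.in.polygon(0.5, 0.5, c(0, 1, 1, 0), c(0, 0, 1, 1))  # 1 = inside
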
@@ -222,6 +220,10 @@ trimCells <- function(cells, region) {
if (inherits(poly, "MULTIPOLYGON")) {
poly <- suppressWarnings(sf::st_cast(poly, to = "POLYGON"))
}
if (inherits(poly, "GEOMETRYCOLLECTION")) {
valid <- which(sapply(poly, function(x) inherits(x, "POLYGON")))[1]
poly <- poly[[valid]]
}
poly
})
}
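
The new GEOMETRYCOLLECTION branch handles the case where intersecting a cell with the region yields a mixed collection (e.g. a polygon plus a stray point or line); only the first POLYGON member is kept. A standalone sketch with a hand-built collection:

library(sf)
gc <- st_geometrycollection(list(
  st_point(c(0, 0)),
  st_polygon(list(rbind(c(0, 0), c(1, 0), c(1, 1), c(0, 0))))
))
valid <- which(sapply(gc, function(x) inherits(x, "POLYGON")))[1]
gc[[valid]]  # the POLYGON member survives
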
@@ -235,21 +237,21 @@ samplePoints <- function(ParentPoly, n, seed, positioning) {
if (!is.null(seed)) {
set.seed(seed)
}
# This loop keeps repeating until the correct number of coordinates

# This loop keeps repeating until the correct number of coordinates
# is sampled, because sp::spsample() does not always return the requested
# number of coordinates, but sometimes too few or too many
repeat {

sampled <- tryCatch({
points <- sp::spsample(
sp::Polygon(coords = ParentPoly),
n = n,
n = n,
type = ifelse(positioning == "random", "random", "nonaligned")
)
points@coords}, error = function(e) NULL
)

if (is.null(sampled) || nrow(sampled) != n) {
next
} else {
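
The resampling loop exists because sp::spsample() treats n as an approximate target rather than a guarantee. A toy polygon illustrates this (the returned count varies between runs):

library(sp)
sq <- Polygon(coords = cbind(c(0, 1, 1, 0, 0), c(0, 0, 1, 1, 0)))
pts <- spsample(sq, n = 10, type = "nonaligned")
nrow(pts@coords)  # often 10, but not guaranteed
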
9 changes: 9 additions & 0 deletions R/voronoiTreemap.R
@@ -49,6 +49,13 @@
#' area. The default is 0.01 (or 1 \%) of the total parental area. Note: this
is different from a relative per-cell error, where 1 \% would be more
#' strict.
#' @param convergence (character) One of "slow", "intermediate", or "fast".
Intermediate (default) and fast adjust cell weights more strongly, so that
the algorithm converges faster towards the final cell size. However, this
comes at the price of stability, with a larger number of polygons possibly
being malformed, e.g. by having self-intersections. Set convergence to
"slow" if you experience problems calculating treemaps with very unequal
cell sizes or very large treemaps.
#' @param seed (integer) The default seed is NULL, which will lead to a new
#' random sampling of cell coordinates for each tesselation. If you want
#' a reproducible arrangement of cells, set seed to an arbitrary number.
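
Taken together, a minimal usage sketch of the new argument; data, levels, and cell_size follow the mtcars example in the package README, and the specific values are illustrative only:

library(WeightedTreemaps)
mtcars$car_name <- rownames(mtcars)
tm <- voronoiTreemap(
  data = mtcars,
  levels = c("gear", "car_name"),
  cell_size = "wt",
  convergence = "slow",  # new in 0.1.3; safest choice for unequal cell sizes
  seed = 123             # reproducible cell arrangement
)
drawTreemap(tm)
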
@@ -144,6 +151,7 @@ voronoiTreemap <- function(
shape = "rectangle",
maxIteration = 100,
error_tol = 0.01,
convergence = "intermediate",
seed = NULL,
positioning = "regular",
verbose = FALSE,
@@ -305,6 +313,7 @@ voronoiTreemap <- function(
target = weights,
maxIteration = maxIteration,
error_tol = error_tol,
convergence = convergence,
outer = sfpoly,
debug = debug
)