diff --git a/DESCRIPTION b/DESCRIPTION index 10bb846..45f7b33 100755 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,8 +1,8 @@ Package: sotkanet Type: Package Title: Sotkanet Open Data Access and Analysis -Version: 0.9.80.9000 -Date: 2024-05-16 +Version: 0.9.80.9002 +Date: 2024-05-23 Encoding: UTF-8 Authors@R: c(person(given = "Leo", @@ -41,7 +41,9 @@ Imports: curl, jsonlite, lubridate, - RefManageR + RefManageR, + digest, + frictionless Suggests: devtools, ggplot2, diff --git a/NAMESPACE b/NAMESPACE index c239e7b..df85f25 100755 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,16 +1,27 @@ # Generated by roxygen2: do not edit by hand export(GetDataSotkanet) -export(SotkanetCite) export(SotkanetIndicatorMetadata) export(SotkanetIndicators) export(SotkanetRegions) +export(get_sotkanet) export(sotkanet.csv_query) export(sotkanet.json_query) +export(sotkanet_cite) +export(sotkanet_clean_cache) +export(sotkanet_indicators) +export(sotkanet_interactive) +export(sotkanet_read_cache) +export(sotkanet_regions) +export(sotkanet_write_cache) export(test_connection) export(try_GET) +export(write_frictionless_metadata) importFrom(RefManageR,toBiblatex) importFrom(curl,has_internet) +importFrom(digest,digest) +importFrom(frictionless,add_resource) +importFrom(frictionless,create_package) importFrom(httr,GET) importFrom(httr,build_url) importFrom(httr,content) @@ -22,6 +33,8 @@ importFrom(httr,user_agent) importFrom(jsonlite,fromJSON) importFrom(lubridate,year) importFrom(lubridate,ymd) +importFrom(utils,capture.output) +importFrom(utils,menu) importFrom(utils,person) importFrom(utils,read.csv2) importFrom(utils,toBibtex) diff --git a/R/GetDataSotkanet.R b/R/GetDataSotkanet.R index aee2a40..2a72c66 100755 --- a/R/GetDataSotkanet.R +++ b/R/GetDataSotkanet.R @@ -1,27 +1,27 @@ #' @title Retrieve Sotkanet Data -#' @description +#' @description #' Retrieve selected data and combine into a single table. -#' @details -#' THL's open data license and limitation of liability +#' @details +#' THL's open data license and limitation of liability #' #' \strong{License} -#' -#' The open data provided by National Institute for Health and Welfare is -#' licensed under CC BY 4.0. This license defines how open data can be utilized. +#' +#' The open data provided by National Institute for Health and Welfare is +#' licensed under CC BY 4.0. This license defines how open data can be utilized. #' The licensing is based on a decision made by the Director General. -#' +#' #' \strong{Limitation of Liability} -#' -#' National Institute for Health and Welfare shall not be liable for any loss, -#' legal proceedings, claims, proceedings, demands, costs or damages regardless -#' of their cause or form, which can be directly or indirectly connected to -#' open data or use of open data published by National Institute for Health and +#' +#' National Institute for Health and Welfare shall not be liable for any loss, +#' legal proceedings, claims, proceedings, demands, costs or damages regardless +#' of their cause or form, which can be directly or indirectly connected to +#' open data or use of open data published by National Institute for Health and #' Welfare. #' @param indicators Dataset identifier(s) -#' @param years vector of years, for example `2015:2018` or `c(2010, 2012, ...)` +#' @param years vector of years c(2010, 2012, ... 
) #' @param genders vector of genders ('male' | 'female' | 'total') #' @param regions filter by selected regions only (default: all regions) -#' @param region.category filter by one or more of the following 14 valid +#' @param region.category filter by one or more of the following 14 valid #' regions categories (default: all categories) #' \itemize{ #' \item "ALUEHALLINTOVIRASTO" @@ -42,36 +42,36 @@ #' @param user.agent "User agent" defined by the user. Default is NULL which #' will then use the package identifier "rOpenGov/sotkanet" #' @return data.frame -#' @references See citation("sotkanet") +#' @references See citation("sotkanet") #' @author Maintainer: Leo Lahti \email{leo.lahti@@iki.fi}, Pyry Kantanen #' @examples \dontrun{dat <- GetDataSotkanet(indicators = 165)} -#' @seealso +#' @seealso #' For more information about dataset structure, see THL webpage at #' \url{https://yhteistyotilat.fi/wiki08/pages/viewpage.action?pageId=27557907} -#' +#' #' THL open data license website: \url{https://yhteistyotilat.fi/wiki08/x/AAadAg} -#' +#' #' @keywords utilities #' @export -GetDataSotkanet <- function(indicators = NULL, - years = 1991:2015, - genders = c("total"), - regions = NULL, +GetDataSotkanet <- function(indicators = NULL, + years = 1991:2015, + genders = c("total"), + regions = NULL, region.category = NULL, user.agent = NULL) { - + if (is.null(indicators)){ message("Parameter 'indicators' is NULL. Please provide at least one indicator.") return(invisible(NULL)) } # List all indicators in Sotkanet database - sotkanet_indicators <- SotkanetIndicators(id = indicators, + sotkanet_indicators <- SotkanetIndicators(id = indicators, type = "table") sotkanet_regions <- SotkanetRegions(type = "table") - + dats <- list() - + for (indicator in indicators) { # Gather URL parts # parsing the csv file is more straightforward in this context @@ -79,66 +79,65 @@ GetDataSotkanet <- function(indicators = NULL, sotkanet_uri <- "/1.1/csv" all_params <- c(indicator, years, genders) names(all_params) <- c("indicator", - rep("years", length(years)), + rep("years", length(years)), rep("genders", length(genders))) all_params <- as.list(all_params) - + # Construct URL url_object <- httr::parse_url(sotkanet_url) path <- paste(url_object$path, sotkanet_uri, sep = "") url_object$path <- path url_object$query <- all_params final_url <- httr::build_url(url_object) - + y <- sotkanet.csv_query(final_url, user.agent = user.agent) - + if (is.null(y)){ message(paste(" There was a problem retrieving indicator from", indicator," ")) return(invisible(NULL)) } - + dats[[as.character(indicator)]] <- y - + } - + # Merge all data from the different indicators in a single table combined_data <- do.call("rbind", dats) # Add region and indicator information - combined_data$indicator.title.fi <- sotkanet_indicators[match(combined_data$indicator, + combined_data$indicator.title.fi <- sotkanet_indicators[match(combined_data$indicator, sotkanet_indicators$indicator), "indicator.title.fi"] - combined_data$region.title.fi <- sotkanet_regions[match(combined_data$region, + combined_data$region.title.fi <- sotkanet_regions[match(combined_data$region, sotkanet_regions$region), "region.title.fi"] - combined_data$region.code <- sotkanet_regions[match(combined_data$region, - sotkanet_regions$region), "region.code"] - combined_data$region.category <- sotkanet_regions[match(combined_data$region, - sotkanet_regions$region), "region.category"] - combined_data$indicator.organization.title.fi <- sotkanet_indicators[match(combined_data$indicator, + 
combined_data$region.code <- sotkanet_regions[match(combined_data$region, + sotkanet_regions$region), "region.code"] + combined_data$region.category <- sotkanet_regions[match(combined_data$region, + sotkanet_regions$region), "region.category"] + combined_data$indicator.organization.title.fi <- sotkanet_indicators[match(combined_data$indicator, sotkanet_indicators$indicator), "indicator.organization.title.fi"] - + if (!is.null(regions)){ - if (regions %in% unique(combined_data$region.title.fi)){ - combined_data <- combined_data[which(combined_data$region.title.fi == regions),] + if (any(regions %in% unique(combined_data$region.title.fi))){ + combined_data <- combined_data[which(combined_data$region.title.fi %in% regions),] } else { message(paste("Input for regions not found from dataset:", regions, "\n", "Please check your parameter input for validity and correctness.")) return(invisible(NULL)) } } - + if (!is.null(region.category)){ - if (region.category %in% unique(combined_data$region.category)){ - combined_data <- combined_data[which(combined_data$region.category == region.category),] + if (any(region.category %in% unique(combined_data$region.category))){ + combined_data <- combined_data[which(combined_data$region.category %in% region.category),] } else { - message(paste("Input for region.categories not found from dataset:", region.category, "\n", + message(paste("Input for region.categories not found from dataset:", region.category, "\n", "Please check your parameter input for validity and correctness.")) return(invisible(NULL)) } } - + combined_data } - diff --git a/R/get_sotkanet.R b/R/get_sotkanet.R new file mode 100644 index 0000000..f601392 --- /dev/null +++ b/R/get_sotkanet.R @@ -0,0 +1,219 @@ +#' @title Retrieve Sotkanet Data +#' @description +#' Retrieve selected data and combine into a single table. +#' @details +#' THL's open data license and limitation of liability +#' +#' \strong{License} +#' +#' The open data provided by National Institute for Health and Welfare is +#' licensed under CC BY 4.0. This license defines how open data can be utilized. +#' The licensing is based on a decision made by the Director General. +#' +#' \strong{Limitation of Liability} +#' +#' National Institute for Health and Welfare shall not be liable for any loss, +#' legal proceedings, claims, proceedings, demands, costs or damages regardless +#' of their cause or form, which can be directly or indirectly connected to +#' open data or use of open data published by National Institute for Health and +#' Welfare. +#' +#' @param indicators Dataset identifier(s) +#' @param years vector of years, for example `2015:2018` or `c(2010, 2012, ...)` +#' @param genders vector of genders ('male' | 'female' | 'total') +#' @param regions filter by selected regions only (default: all regions) +#' @param region.category filter by one or more of the following 14 valid +#' regions categories (default: all categories) +#' \itemize{ +#' \item "ALUEHALLINTOVIRASTO" +#' \item "ELY-KESKUS" +#' \item "ERVA" +#' \item "EURALUEET" (Eurozone) +#' \item "EUROOPPA" (Europe) +#' \item "HYVINVOINTIALUE" (welfare country) +#' \item "KUNTA" (municipality) +#' \item "MAA" (country) +#' \item "MAAKUNTA" (region) +#' \item "NUTS1" +#' \item "POHJOISMAAT" (Nordic countries) +#' \item "SAIRAANHOITOPIIRI (hospital district) +#' \item "SEUTUKUNTA" +#' \item "SUURALUE" +#' } +#' @param lang Language of the data variables: indicator.title, region.title +#' and indicator.organization.title. 
Default is Finnish ("fi"), the other options being
+#' English ("en") and Swedish ("sv").
+#' @param user.agent "User agent" defined by the user. Default is NULL which
+#' will then use the package identifier "rOpenGov/sotkanet"
+#' @param cache a logical whether to do caching. Default is `TRUE`.
+#' @param cache_dir a path to cache directory. `NULL` (default) uses and creates
+#' "sotkanet" directory in the temporary directory defined by base R [tempdir()]
+#' function. The user can set the cache directory to an existing directory with this
+#' argument.
+#' @param frictionless a logical whether to return a datapackage, with metadata inside,
+#' instead of a data.frame.
+#' @return Returns a data.frame when frictionless is `FALSE` and a datapackage
+#' when frictionless is `TRUE`.
+#' @references See citation("sotkanet")
+#' @author Maintainer: Leo Lahti \email{leo.lahti@@iki.fi}, Pyry Kantanen
+#' @examples \dontrun{dat <- get_sotkanet(indicators = 165)}
+#' @seealso
+#' For more information about dataset structure, see THL webpage at
+#' \url{https://yhteistyotilat.fi/wiki08/pages/viewpage.action?pageId=27557907}
+#'
+#' THL open data license website: \url{https://yhteistyotilat.fi/wiki08/x/AAadAg}
+#'
+#'
+#' @importFrom digest digest
+#'
+#' @keywords utilities
+#' @export
+get_sotkanet <- function(indicators = NULL,
+                         years = 1991:2015,
+                         genders = c("total"),
+                         regions = NULL,
+                         region.category = NULL,
+                         lang = "fi",
+                         user.agent = NULL,
+                         cache = TRUE,
+                         cache_dir = NULL,
+                         frictionless = FALSE) {
+
+  if (is.null(indicators)){
+    message("Parameter 'indicators' is NULL. Please provide at least one indicator.")
+    return(invisible(NULL))
+  }
+
+  # Query for caching
+
+  query <- list(
+    id = indicators,
+    years = years,
+    genders = genders,
+    regions = regions,
+    region.category = region.category,
+    download_date = Sys.Date(),
+    language = lang
+  )
+
+  query_hash <- digest::digest(query, algo = "md5")
+
+  # Check if the data is in cache
+
+  check_cache <- sotkanet_read_cache(cache = cache, cache_dir, query_hash)
+
+  if (!is.null(check_cache)){
+
+    if (dim(check_cache)[1] == 0){
+
+      warning("The data.frame is empty")
+
+    }
+
+    if (frictionless){
+
+      return(write_frictionless_metadata(indicators, check_cache))
+
+    } else{
+
+      return(check_cache)
+
+    }
+  }
+
+  # List all indicators in Sotkanet database
+  sotkanet_indicators <- sotkanet_indicators(id = indicators,
+                                             type = "table", lang = lang)
+  sotkanet_regions <- sotkanet_regions(type = "table", lang = lang)
+
+  dats <- list()
+
+  for (indicator in indicators) {
+    # Gather URL parts
+    # parsing the csv file is more straightforward in this context
+    sotkanet_url <- "https://sotkanet.fi/rest"
+    sotkanet_uri <- "/1.1/csv"
+    all_params <- c(indicator, years, genders)
+    names(all_params) <- c("indicator",
+                           rep("years", length(years)),
+                           rep("genders", length(genders)))
+    all_params <- as.list(all_params)
+
+    # Construct URL
+    url_object <- httr::parse_url(sotkanet_url)
+    path <- paste(url_object$path, sotkanet_uri, sep = "")
+    url_object$path <- path
+    url_object$query <- all_params
+    final_url <- httr::build_url(url_object)
+
+    y <- sotkanet.csv_query(final_url, user.agent = user.agent)
+
+    if (is.null(y)){
+      message(paste(" There was a problem retrieving indicator from", indicator," "))
+      return(invisible(NULL))
+    }
+
+    dats[[as.character(indicator)]] <- y
+
+  }
+
+  # Merge all data from the different indicators in a single table
+  combined_data <- do.call("rbind", dats)
+
+  # Add region and indicator information
+  combined_data$indicator.title <- sotkanet_indicators[match(combined_data$indicator,
+    sotkanet_indicators$indicator), "indicator.title"]
+  combined_data$region.title <- sotkanet_regions[match(combined_data$region,
+    sotkanet_regions$region), "region.title"]
+  combined_data$region.code <- sotkanet_regions[match(combined_data$region,
+    sotkanet_regions$region), "region.code"]
+  combined_data$region.category <- sotkanet_regions[match(combined_data$region,
+    sotkanet_regions$region), "region.category"]
+  combined_data$indicator.organization.title <- sotkanet_indicators[match(combined_data$indicator,
+    sotkanet_indicators$indicator), "indicator.organization.title"]
+
+  if (!is.null(regions)){
+    if (any(regions %in% unique(combined_data$region.title))){
+      combined_data <- combined_data[which(combined_data$region.title %in% regions),]
+    } else {
+      message(paste("Input for regions not found from dataset:", regions, "\n",
+                    "Please check your parameter input for validity and correctness."))
+      return(invisible(NULL))
+    }
+  }
+
+  if (!is.null(region.category)){
+    if (any(region.category %in% unique(combined_data$region.category))){
+      combined_data <- combined_data[which(combined_data$region.category %in% region.category),]
+    } else {
+      message(paste("Input for region.categories not found from dataset:", region.category, "\n",
+                    "Please check your parameter input for validity and correctness."))
+      return(invisible(NULL))
+    }
+  }
+
+  #Write the data into cache
+
+  sotkanet_write_cache(cache, cache_dir, query_hash, combined_data)
+
+  #Return the data in the requested format
+
+  if (dim(combined_data)[1] == 0){
+
+    warning("The data.frame is empty")
+
+  }
+
+  if (frictionless){
+
+    return(write_frictionless_metadata(indicators, combined_data))
+
+  } else {
+
+    return(combined_data)
+
+  }
+}
+
+
diff --git a/R/SotkanetCite.R b/R/sotkanet_cite.R
similarity index 89%
rename from R/SotkanetCite.R
rename to R/sotkanet_cite.R
index b836876..f1127f0 100644
--- a/R/SotkanetCite.R
+++ b/R/sotkanet_cite.R
@@ -26,18 +26,20 @@
 #' SotkanetCite(10013, lang = "en", format = "Bibtex")
 #' }
 #' @export
-SotkanetCite <- function(id,
+sotkanet_cite <- function(id,
                          lang = "fi",
                          format = "Biblatex"){
 
   format <- tolower(as.character(format))
 
-  if(!any(lang %in% c("en", "fi", "sv"))){
-    stop("The supported languages are English (en), Finnish (fi) and Swedish (sv).")
+  info <- SotkanetIndicatorMetadata(id)
+
+  if(is.null(info)){
+    stop("The id does not match with any of the datasets.")
   }
 
-  if(!any(id %in% SotkanetIndicators()$indicator)){
-    stop("The id does not match with any of the datasets.")
+  if(!any(lang %in% c("en", "fi", "sv"))){
+    stop("The supported languages are English (en), Finnish (fi) and Swedish (sv).")
   }
 
   if(!format %in% c("bibentry", "bibtex", "biblatex")){
@@ -45,7 +47,7 @@ SotkanetCite <- function(id,
     format <- "biblatex"
   }
 
-  info <- SotkanetIndicatorMetadata(id)
+
 
   urldate <- as.character(Sys.Date())
 
@@ -62,8 +64,8 @@ SotkanetCite <- function(id,
     author = utils::person(given = ""),
     urldate = urldate,
     type = "Dataset",
-    note = paste("Accessed", as.Date(urldate),
-                 "dataset last updated", as.Date(last_update_date))
+    note = paste0("Accessed ", as.Date(urldate),
+                  ", dataset last updated ", as.Date(last_update_date))
   )
 
   if(format == "bibtex"){
diff --git a/R/sotkanet_clean_cache.R b/R/sotkanet_clean_cache.R
new file mode 100644
index 0000000..8492a99
--- /dev/null
+++ b/R/sotkanet_clean_cache.R
@@ -0,0 +1,41 @@
+#' @title Clean sotkanet Cache
+#'
+#' @description
+#' Delete all .rds files from the sotkanet cache directory.
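+#' @return Invisibly returns `TRUE` after cleaning, or `FALSE` if the cache directory does not exist.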
+#'
+#' @param cache_dir
+#' A path to the cache directory. If `NULL` (default) tries to clean default
+#' temporary cache directory.
+#'
+#' @examples
+#' \dontrun{
+#' sotkanet_clean_cache()
+#' }
+#' @export
+sotkanet_clean_cache <- function(cache_dir = NULL){
+
+  if (is.null(cache_dir)){
+
+    cache_dir <- file.path(tempdir(), "sotkanet")
+
+    cache_dir <- path.expand(cache_dir)
+  }
+
+  if (!dir.exists(cache_dir)){
+    message("The cache directory does not exist.")
+    return(invisible(FALSE))
+  }
+  files <- list.files(cache_dir,
+                      pattern = "rds",
+                      full.names = TRUE
+  )
+
+  if (length(files) == 0) {
+    message("The cache folder ", cache_dir, " is empty.")
+  } else {
+    unlink(files)
+    message("Deleted .rds files from ", cache_dir)
+  }
+
+  invisible(TRUE)
+}
diff --git a/R/sotkanet_collect.R b/R/sotkanet_collect.R
new file mode 100644
index 0000000..5c4bf49
--- /dev/null
+++ b/R/sotkanet_collect.R
@@ -0,0 +1,34 @@
+#' @noRd
+#' @title Sotkanet from list to data frame
+#' @description Convert sotkanet list to data frame.
+#' @param x input data (from SotkanetIndicators or SotkanetRegions etc.)
+#' @param name name for the column ("indicator", "region", etc.)
+#' @param lang language of the variables
+#' @return sotkanet data table
+#' @references See citation("sotkanet")
+#' @author Maintainer: Leo Lahti \email{leo.lahti@@iki.fi}
+#' @keywords utilities
sotkanet_collect <- function(x, name, lang) {
+
+  # if (length(x$id) == 1) {
+  #   x <- list(x)
+  # }
+
+  if (name == "region") {
+    out <- data.frame(list(
+      region = x$id,
+      region.title = x$title[[lang]],
+      region.code = x$code,
+      region.category = x$category,
+      region.uri = x$uri))
+  } else if (name == "indicator") {
+    out <- data.frame(list(
+      indicator = x$id,
+      indicator.title = x$title[[lang]],
+      indicator.organization = x$organization$id,
+      indicator.organization.title = x$organization$title[[lang]]))
+  }
+
+  out
+}
+
diff --git a/R/sotkanet_fixity.R b/R/sotkanet_fixity.R
new file mode 100644
index 0000000..5c013e6
--- /dev/null
+++ b/R/sotkanet_fixity.R
@@ -0,0 +1,46 @@
+#' Calculate fixity checksum for an object
+#'
+#' @description
+#' Uses a hash function (md5) on a sotkanet dataset and calculates a digest of the dataset
+#' as a character string.
+#'
+#' @details
+#'
+#' Fixity can be used to make sure that the file has not changed. This is done by calculating
+#' a checksum for the dataset that will change if the dataset changes. The default algorithm
+#' used to calculate the checksum is md5 hash, but all the algorithms supported by the imported
+#' digest function are applicable. See the digest function documentation for more details.
+#'
+#' This function takes the whole dataset as an input. This means that everything to do with
+#' the data is used when calculating the fixity checksum, so for example, if you change
+#' the language of the output the checksum will also change.
+#'
+#'
+#'
+#' @param data A sotkanet dataset.
+#' @param algorithm Algorithm used for calculating the checksum. Default is `md5`, but
+#' supports all the algorithms in digest function.
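+#' @examples
+#' \dontrun{
+#' # A minimal sketch: checksum an indicator table downloaded with get_sotkanet()
+#' dat <- get_sotkanet(indicators = 165)
+#' sotkanet_fixity(dat)
+#' }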
+#'
+#' @return A character string
+#'
+#' @seealso [digest::digest()]
+#'
+#' For more info on fixity checksum see:
+#' \url{https://www.dpconline.org/handbook/technical-solutions-and-tools/fixity-and-checksums}
+#'
+#' @importFrom digest digest
+#'
+#' @keywords internal
+sotkanet_fixity <- function(data, algorithm = "md5"){
+  if (!(algorithm %in% c("md5", "sha1", "crc32", "sha256", "sha512", "xxhash32",
+                         "xxhash64", "murmur32", "spookyhash", "blake3", "crc32c"))){
+
+    stop("Use a valid algorithm. See digest::digest function documentation.")
+
+  }
+
+  fixity <- digest::digest(data, algo = algorithm)
+
+  fixity
+
+}
diff --git a/R/sotkanet_indicators.R b/R/sotkanet_indicators.R
new file mode 100644
index 0000000..321ede2
--- /dev/null
+++ b/R/sotkanet_indicators.R
@@ -0,0 +1,114 @@
+#' @title Sotkanet Indicators
+#' @description Retrieve Sotkanet indicator metadata
+#' @details Data is fetched from \url{https://sotkanet.fi/rest/1.1/indicators}.
+#' @param id Dataset identifier. Default is NULL returning all
+#' @param type type output format, either 'table' (default) or 'raw'.
+#' Default produces a truncated table with strictly defined columns that
+#' are useful in other functions. 'Raw' produces the full output which might
+#' be useful for exploratory purposes.
+#' @param lang Language of the output.
+#' @param user.agent "User agent" defined by the user. Default is NULL which
+#' will then use the package identifier "rOpenGov/sotkanet"
+#' @param cache a logical whether to do caching.
+#' @param cache_dir a path to the cache dir.
+#' @return data.frame (type = "table") or a list (type = "raw")
+#'
+#' @references See citation("sotkanet")
+#'
+#' @author Leo Lahti \email{leo.lahti@@iki.fi}, Pyry Kantanen
+#' @examples
+#' \dontrun{
+#' sotkanet.indicators <- sotkanet_indicators(type = "table", lang = "fi")
+#' }
+#' @importFrom httr parse_url build_url
+#' @importFrom digest digest
+#' @keywords utilities
+#' @export
+sotkanet_indicators <- function(id = NULL, type = "table", lang = "fi", user.agent = NULL,
+                                cache = TRUE, cache_dir = NULL)
+{
+
+  if (!(type %in% c("table", "raw"))){
+    message("Please use valid type input: 'table' or 'raw'")
+    return(invisible(NULL))
+  }
+
+  indicator_query <- list(id = id, type = type, lang = lang)
+  indicator_hash <- digest::digest(indicator_query, algo = "md5")
+
+  indicator_cache <- sotkanet_read_cache(cache = cache, cache_dir = cache_dir, indicator_hash)
+
+  if (!is.null(indicator_cache)){
+    return(indicator_cache)
+  }
+
+  # Gather URL parts
+  sotkanet_url <- "https://sotkanet.fi/rest"
+  sotkanet_uri <- "/1.1/indicators"
+
+  if (!is.null(id)){
+
+    if (length(id) > 1){
+      res <- lapply(id, FUN=sotkanet_indicators, type = type, lang = lang, user.agent = user.agent)
+      res <- res[!is.na(res)]
+      if (type == "table"){
+        res <- do.call(rbind.data.frame, res)
+      }
+      return(res)
+    }
+
+    if (type == "raw"){
+      sotkanet_uri <- paste(sotkanet_uri, id, sep = "/")
+
+      url_object <- httr::parse_url(sotkanet_url)
+      path <- paste(url_object$path, sotkanet_uri, sep = "")
+      url_object$path <- path
+      final_url <- httr::build_url(url_object)
+
+      res <- sotkanet.json_query(final_url,
+                                 flatten = TRUE,
+                                 user.agent = user.agent)
+
+      sotkanet_write_cache(cache = cache, cache_dir = cache_dir, indicator_hash, res)
+
+      return(res)
+
+    } else if (type == "table"){
+      sotkanet_uri <- paste(sotkanet_uri, id, sep = "/")
+      url_object <- httr::parse_url(sotkanet_url)
+      path <- paste(url_object$path, sotkanet_uri, sep = "")
+      url_object$path <- path
+      final_url <- 
httr::build_url(url_object) + + res <- sotkanet.json_query(final_url, + flatten = TRUE, + user.agent = user.agent) + + # res <- res[which(res$id == id),] + + res <- sotkanet_collect(res, "indicator", lang = lang) + + sotkanet_write_cache(cache = cache, cache_dir = cache_dir, indicator_hash, res) + + return(res) + } + } + + # Construct URL + url_object <- httr::parse_url(sotkanet_url) + path <- paste(url_object$path, sotkanet_uri, sep = "") + url_object$path <- path + final_url <- httr::build_url(url_object) + + res <- sotkanet.json_query(final_url, + flatten = TRUE, + user.agent = user.agent) + + if (type == "table") { + res <- sotkanet_collect(res, "indicator", lang = lang) + } + + sotkanet_write_cache(cache = cache, cache_dir = cache_dir, indicator_hash, res) + + res +} diff --git a/R/sotkanet_interactive.R b/R/sotkanet_interactive.R new file mode 100644 index 0000000..2259114 --- /dev/null +++ b/R/sotkanet_interactive.R @@ -0,0 +1,196 @@ +#' Interactive function for get_sotkanet +#' +#' @description +#' A simple interactive function, that helps with downloading Sotkanet data +#' +#' @param code +#' A unique identifier for the dataset of interest. +#' +#' @seealso [get_sotkanet()] +#' +#' @importFrom utils capture.output +#' @importFrom utils menu +#' @export +sotkanet_interactive <- function(code = NULL){ + + lang_selection <- switch( + menu(c("Finnish", "English", "Swedish"), + title = "Select language") + 1, + return(invisible()), + "fi", + "en", + "sv" + ) + + if(is.null(code)) { + search_id <- readline(prompt = "Enter search id for the data: ") + results <- SotkanetIndicatorMetadata(search_id) + + + if (!is.null(results)){ + + choice <- switch( + menu(choices = c(results$title[lang_selection], "No"), + title = "Is this the right dataset?") + 1, + return(invisible()), + TRUE, + FALSE + ) + + if (!choice){ + return(invisible()) + } + + } else { + stop(paste0("\nNo data found with the given search id: "), search_id) + } + + } + + download_selection <- switch( + menu(choices = c("Yes", "No"), + title = "Download the dataset?") + 1, + return(invisible()), + TRUE, + FALSE + ) + + manual_selection <- FALSE + + if(download_selection){ + manual_selection <- switch( + menu(choices = c("Default", "Manually selected"), + title = "Would you like to use default arguments or manually select them?") +1, + return(invisible()), + FALSE, + TRUE + ) + + if (manual_selection) { + years_selection_begin <- readline(prompt = "Enter the beginning year for the data: ") + years_selection_end <- readline(prompt = "Enter the ending year for the data: ") + + years_selection_begin <- as.integer(years_selection_begin) + years_selection_end <- as.integer(years_selection_end) + + years <- years_selection_begin:years_selection_end + + gender_selection <- switch( + menu(choices = c("Male", "Female", "Male & Female", "Total", "All"), + title = "Which genders do you want for the data?") + 1, + return(invisible()), + c("male"), + c("female"), + c("male", "female"), + c("total"), + c("male", "female", "total") + ) + + # region_selection <- readline(prompt = "Enter the regions (empty for default): ") + # + # region.category_selection <- readline(prompt = "Enter the region.category (empty for default): ") + # + # if (region_selection == ""){ + # region_selection <- NULL + # } + # + # if (region.category_selection == ""){ + # region.category_selection <- NULL + # } + + + sotkanet_data <- get_sotkanet(indicators = search_id, + years = years, + genders = gender_selection, + lang = lang_selection) + + + + } else if 
(!manual_selection){ + sotkanet_data <- get_sotkanet(indicators = search_id, + lang = lang_selection) + + } + + } + + tempfile_for_sink <- tempfile() + + print_citation <- switch( + menu(choices = c("Yes", "No"), + title = "Print dataset citation?") + 1, + return(invisible()), + TRUE, + FALSE + ) + + if (print_citation){ + citation <- sotkanet_cite(id = search_id, + lang = lang_selection) + capture.output(cat("#### DATASET CITATION: \n\n"), + file = tempfile_for_sink, append = TRUE) + capture.output(print(citation), + file = tempfile_for_sink, append = TRUE) + capture.output(cat("\n"), + file = tempfile_for_sink, append = TRUE) + } + + print_code <- switch( + menu(choices = c("Yes", "No"), + title = "Print the code for downloading dataset?") + 1, + return(invisible()), + TRUE, + FALSE + ) + + if (print_code == TRUE && manual_selection == TRUE){ + capture.output(cat("#### DOWNLOAD PARAMETERS: \n\n"), + file = tempfile_for_sink, append = TRUE) + + capture.output(print( + paste0("get_sotkanet(indicators = ", search_id, + ", years = ", years[1], ":", years[length(years)], + ", genders = ", paste0("c(", "'", + paste0(gender_selection, collapse = "', '"), "')"), + ", regions = NULL, region.category = NULL", + ", lang = ", "'", lang_selection, "'", ")") + ), file = tempfile_for_sink, append = TRUE) + capture.output(cat("\n"), file = tempfile_for_sink, append = TRUE) + + } else if (print_code == TRUE && manual_selection == FALSE){ + capture.output(cat("#### DOWNLOAD PARAMETERS: \n\n"), + file = tempfile_for_sink, append = TRUE) + capture.output(print( + paste0("get_sotkanet(indicators = ", search_id, + ", lang = ", "'", lang_selection, "'", ")") + ), file = tempfile_for_sink, append = TRUE) + capture.output(cat("\n"), file = tempfile_for_sink, append = TRUE) + } + + if (exists("sotkanet_data")){ + print_code <- switch( + menu(choices = c("Yes", "No"), + title = "Print dataset fixity checksum?") + 1, + return(invisible()), + TRUE, + FALSE + ) + + if (print_code){ + fixity <- sotkanet_fixity(sotkanet_data, algorithm = "md5") + capture.output(cat("#### FIXITY CHECKSUM: \n\n"), + file = tempfile_for_sink, append = TRUE) + capture.output(print( + paste0("Fixity checksum (md5) for dataset ", search_id, ": ", + fixity) + ), file = tempfile_for_sink, append = TRUE) + capture.output(cat("\n"), file = tempfile_for_sink, append = TRUE) + } + } + + if (exists("sotkanet_data")){ + cat(readLines(tempfile_for_sink), sep = "\n") + return(sotkanet_data) + } + +} diff --git a/R/sotkanet_read_cache.R b/R/sotkanet_read_cache.R new file mode 100644 index 0000000..21dfa50 --- /dev/null +++ b/R/sotkanet_read_cache.R @@ -0,0 +1,42 @@ +#' @title Read cache for sotkanet data.frame +#' +#' @description +#' Helper function that reads the cache for saved sotkanet data.frame. +#' +#' +#' @param cache a logical whether to do caching. +#' @param cache_dir a path to cache directory. +#' @param query_hash a character used to identify the data.frame. 
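+#' @return The cached data.frame if a matching cache file exists, otherwise `NULL`.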
+#' +#' @references See citation("sotkanet") +#' @keywords internal +#' @export +sotkanet_read_cache <- function(cache, cache_dir, query_hash){ + + if (cache){ + + if (is.null(cache_dir)){ + + cache_dir <- file.path(tempdir(), "sotkanet") + + cache_dir <- path.expand(cache_dir) + + } + + if (dir.exists(cache_dir)){ + + cache_file <- file.path(cache_dir, paste0(query_hash, ".rds")) + + if (file.exists(cache_file)){ + + combined_data <- readRDS(cache_file) + + return(combined_data) + + } else { + + return(NULL) + } + } + } + } diff --git a/R/sotkanet_regions.R b/R/sotkanet_regions.R new file mode 100644 index 0000000..988d2d7 --- /dev/null +++ b/R/sotkanet_regions.R @@ -0,0 +1,57 @@ +#' @title Sotkanet Regions +#' @description Retrieves sotkanet regions data. +#' @details Data is fetched from \url{https://sotkanet.fi/rest/1.1/regions}. +#' @param type type output format, either 'table' (default) or 'raw'. +#' Default produces a truncated table with strictly defined columns that +#' are useful in other functions. 'Raw' produces the full output which might +#' be useful for exploratory purposes. +#' @param lang Language of the output. +#' @param user.agent "User agent" defined by the user. Default is NULL which +#' will then use the package identifier "rOpenGov/sotkanet" +#' @param cache a logical whether to do caching. +#' @param cache_dir a path to the cache directory. +#' @return data.frame +#' @export +#' @references See citation("sotkanet") +#' @author Maintainer: Leo Lahti \email{leo.lahti@@iki.fi} +#' @examples +#' \dontrun{ +#' sotkanet.regions <- sotkanet_regions(type = "table", lang = "fi") +#' } +#' @importFrom digest digest +#' @keywords utilities +sotkanet_regions <- function(type = "table", lang = "fi", user.agent = NULL, + cache = TRUE, cache_dir = NULL) +{ + + region_query <- list(type = type, lang = lang) + region_hash <- digest::digest(region_query, algo = "md5") + + region_cache <- sotkanet_read_cache(cache = cache, cache_dir = cache_dir, region_hash) + + if (!is.null(region_cache)){ + return(region_cache) + } + + sotkanet_url <- "https://sotkanet.fi/rest" + sotkanet_uri <- "/1.1/regions" + + # Construct URL + url_object <- httr::parse_url(sotkanet_url) + path <- paste(url_object$path, sotkanet_uri, sep = "") + url_object$path <- path + + final_url <- httr::build_url(url_object) + + res <- sotkanet.json_query(final_url, flatten = TRUE) + + if (type == "table") { + res <- sotkanet_collect(res, "region", lang = lang) + } + + sotkanet_write_cache(cache = cache, cache_dir = cache_dir, region_hash, res) + + res +} + + diff --git a/R/sotkanet_write_cache.R b/R/sotkanet_write_cache.R new file mode 100644 index 0000000..7c5a4e5 --- /dev/null +++ b/R/sotkanet_write_cache.R @@ -0,0 +1,37 @@ +#' @title Write sotkanet data.frame into cache +#' +#' @description +#' Helper function that writes the sotkanet data.frame into cache. +#' +#' @param cache a logical whether to do caching. +#' @param cache_dir a path to cache directory. +#' @param query_hash a character used to identify the data.frame. +#' @param data sotkanet data.frame. 
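+#' @return No return value; called for the side effect of saving `data` to the cache directory as an .rds file.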
+#' +#' +#' @references See citation("sotkanet") +#' @keywords internal +#' @export +sotkanet_write_cache <- function(cache, cache_dir, query_hash, data){ + + if (cache){ + + if (is.null(cache_dir)){ + + cache_dir <- file.path(tempdir(), "sotkanet") + + cache_dir <- path.expand(cache_dir) + + } + + if(!dir.exists(cache_dir)){ + + dir.create(cache_dir, recursive = TRUE) + } + + cache_file <- file.path(cache_dir, paste0(query_hash, ".rds")) + + saveRDS(data, file = cache_file, compress = TRUE) + + } +} diff --git a/R/write_frictionless_metadata.R b/R/write_frictionless_metadata.R new file mode 100644 index 0000000..1bcbbca --- /dev/null +++ b/R/write_frictionless_metadata.R @@ -0,0 +1,58 @@ +#' @title Writes the sotkanet data.frame in datapackage form +#' +#' @description +#' A function that turns the data.frame into a datapackage with the metadata added in. +#' +#' @param indicators Dataset identifier +#' @param data The sotkanet data.frame object +#' +#' @return datapackage +#' +#' @references See citation("sotkanet") +#' @importFrom frictionless create_package add_resource +#' @keywords internal +#' @export +write_frictionless_metadata <- function(indicators, data){ + + combined_data_package <- create_package() + + combined_data_package <- add_resource(combined_data_package, + resource_name = paste0("sotkanet"), + data = data) + + for (i in 1:length(indicators)){ + + meta <- SotkanetIndicatorMetadata(indicators[i]) + + sources <- list(meta$organization[2], + path = paste0("https://sotkanet.fi/sotkanet/en/metadata/indicators/", meta$id)) + + sources <- as.list(unlist(sources)) + + meta_list <- list( + + title = meta$title, + + id = meta$id, + + description = meta$description, + + homepage = "sotkanet.fi", + + sources = sources, + + updated = meta$`data-updated` + ) + + temp_list <- list() + temp_list[[paste0("sotkanet-", meta$id)]] <- meta_list + + combined_data_package <- append(combined_data_package, temp_list) + + } + + combined_data_package <- create_package(combined_data_package) + + return(combined_data_package) + +} diff --git a/man/GetDataSotkanet.Rd b/man/GetDataSotkanet.Rd index 2e386c2..6f4f9d7 100755 --- a/man/GetDataSotkanet.Rd +++ b/man/GetDataSotkanet.Rd @@ -16,13 +16,13 @@ GetDataSotkanet( \arguments{ \item{indicators}{Dataset identifier(s)} -\item{years}{vector of years, for example `2015:2018` or `c(2010, 2012, ...)`} +\item{years}{vector of years c(2010, 2012, ... )} \item{genders}{vector of genders ('male' | 'female' | 'total')} \item{regions}{filter by selected regions only (default: all regions)} -\item{region.category}{filter by one or more of the following 14 valid +\item{region.category}{filter by one or more of the following 14 valid regions categories (default: all categories) \itemize{ \item "ALUEHALLINTOVIRASTO" @@ -51,20 +51,20 @@ data.frame Retrieve selected data and combine into a single table. } \details{ -THL's open data license and limitation of liability +THL's open data license and limitation of liability \strong{License} -The open data provided by National Institute for Health and Welfare is -licensed under CC BY 4.0. This license defines how open data can be utilized. +The open data provided by National Institute for Health and Welfare is +licensed under CC BY 4.0. This license defines how open data can be utilized. The licensing is based on a decision made by the Director General. 
\strong{Limitation of Liability} -National Institute for Health and Welfare shall not be liable for any loss, -legal proceedings, claims, proceedings, demands, costs or damages regardless -of their cause or form, which can be directly or indirectly connected to -open data or use of open data published by National Institute for Health and +National Institute for Health and Welfare shall not be liable for any loss, +legal proceedings, claims, proceedings, demands, costs or damages regardless +of their cause or form, which can be directly or indirectly connected to +open data or use of open data published by National Institute for Health and Welfare. } \examples{ diff --git a/man/get_sotkanet.Rd b/man/get_sotkanet.Rd new file mode 100644 index 0000000..b63d59b --- /dev/null +++ b/man/get_sotkanet.Rd @@ -0,0 +1,104 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/get_sotkanet.R +\name{get_sotkanet} +\alias{get_sotkanet} +\title{Retrieve Sotkanet Data} +\usage{ +get_sotkanet( + indicators = NULL, + years = 1991:2015, + genders = c("total"), + regions = NULL, + region.category = NULL, + lang = "fi", + user.agent = NULL, + cache = TRUE, + cache_dir = NULL, + frictionless = FALSE +) +} +\arguments{ +\item{indicators}{Dataset identifier(s)} + +\item{years}{vector of years, for example `2015:2018` or `c(2010, 2012, ...)`} + +\item{genders}{vector of genders ('male' | 'female' | 'total')} + +\item{regions}{filter by selected regions only (default: all regions)} + +\item{region.category}{filter by one or more of the following 14 valid +regions categories (default: all categories) + \itemize{ + \item "ALUEHALLINTOVIRASTO" + \item "ELY-KESKUS" + \item "ERVA" + \item "EURALUEET" (Eurozone) + \item "EUROOPPA" (Europe) + \item "HYVINVOINTIALUE" (welfare country) + \item "KUNTA" (municipality) + \item "MAA" (country) + \item "MAAKUNTA" (region) + \item "NUTS1" + \item "POHJOISMAAT" (Nordic countries) + \item "SAIRAANHOITOPIIRI (hospital district) + \item "SEUTUKUNTA" + \item "SUURALUE" + }} + +\item{lang}{Language of the data variables: indicator.title, region.title +and indicator.organization.title. Default is Finnish ("fi"), the other options being +English ("en") and Swedish ("sv").} + +\item{user.agent}{"User agent" defined by the user. Default is NULL which} + +\item{cache}{a logical whether to do caching. Defaults is `TRUE`.} + +\item{cache_dir}{a path to cache directory. `Null` (default) uses and creates +"sotkanet" directory in the temporary directory defined by base R [tempdir()] +function. The user can set the cache directory to an existing directory with this +argument. + will then use the package identifier "rOpenGov/sotkanet"} + +\item{frictionless}{a logical whether to return a datapackage, with metadata inside, +instead of a data.frame.} +} +\value{ +Returns a data.frame when frictionless is `FALSE` and a datapackage +when frictionless is `TRUE`. +} +\description{ +Retrieve selected data and combine into a single table. +} +\details{ +THL's open data license and limitation of liability + +\strong{License} + +The open data provided by National Institute for Health and Welfare is +licensed under CC BY 4.0. This license defines how open data can be utilized. +The licensing is based on a decision made by the Director General. 
+ +\strong{Limitation of Liability} + +National Institute for Health and Welfare shall not be liable for any loss, +legal proceedings, claims, proceedings, demands, costs or damages regardless +of their cause or form, which can be directly or indirectly connected to +open data or use of open data published by National Institute for Health and +Welfare. +} +\examples{ +\dontrun{dat <- get_sotkanet(indicators = 165)} +} +\references{ +See citation("sotkanet") +} +\seealso{ +For more information about dataset structure, see THL webpage at +\url{https://yhteistyotilat.fi/wiki08/pages/viewpage.action?pageId=27557907} + +THL open data license website: \url{https://yhteistyotilat.fi/wiki08/x/AAadAg} +} +\author{ +Maintainer: Leo Lahti \email{leo.lahti@iki.fi}, Pyry Kantanen +} +\keyword{utilities} diff --git a/man/sotkanet-package.Rd b/man/sotkanet-package.Rd index f3d3692..e023b09 100755 --- a/man/sotkanet-package.Rd +++ b/man/sotkanet-package.Rd @@ -64,7 +64,7 @@ The `sotkanet` R package aims to make retrieval of sotkanet Package: \tab sotkanet\cr Type: \tab Package\cr Version: \tab See sessionInfo() or DESCRIPTION file\cr - Date: \tab 2013-2023\cr + Date: \tab 2013-2024\cr License: \tab BSD 2-clause License\cr LazyLoad: \tab yes\cr } diff --git a/man/SotkanetCite.Rd b/man/sotkanet_cite.Rd similarity index 85% rename from man/SotkanetCite.Rd rename to man/sotkanet_cite.Rd index fe168d3..92e74eb 100644 --- a/man/SotkanetCite.Rd +++ b/man/sotkanet_cite.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/SotkanetCite.R -\name{SotkanetCite} -\alias{SotkanetCite} +% Please edit documentation in R/sotkanet_cite.R +\name{sotkanet_cite} +\alias{sotkanet_cite} \title{Crate a Data Bibliography} \usage{ -SotkanetCite(id, lang = "fi", format = "Biblatex") +sotkanet_cite(id, lang = "fi", format = "Biblatex") } \arguments{ \item{id}{Indicator id.} diff --git a/man/sotkanet_clean_cache.Rd b/man/sotkanet_clean_cache.Rd new file mode 100644 index 0000000..d99124c --- /dev/null +++ b/man/sotkanet_clean_cache.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_clean_cache.R +\name{sotkanet_clean_cache} +\alias{sotkanet_clean_cache} +\title{Clean sotkanet Cache} +\usage{ +sotkanet_clean_cache(cache_dir = NULL) +} +\arguments{ +\item{cache_dir}{A path to the cache directory. If `NULL` (default) tries to clean default +temporary cache directory.} +} +\description{ +Delete all .rds files from the sotkanet cache directory. +} +\examples{ +\dontrun{ +SotkanetCleanCache() +} +} diff --git a/man/sotkanet_fixity.Rd b/man/sotkanet_fixity.Rd new file mode 100644 index 0000000..16cc880 --- /dev/null +++ b/man/sotkanet_fixity.Rd @@ -0,0 +1,38 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_fixity.R +\name{sotkanet_fixity} +\alias{sotkanet_fixity} +\title{Calculate fixity checksum for an object} +\usage{ +sotkanet_fixity(data, algorithm = "md5") +} +\arguments{ +\item{data}{A sotkanet dataset.} + +\item{algorithm}{Algorithm used for calculating the checksum. Default is `md5`, but +supports all the algorithms in digest function.} +} +\value{ +A character string +} +\description{ +Uses a hash function (md5) on a sotkanet dataset and calculates a digest of the dataset +as a character string. +} +\details{ +Fixity can be used to make sure that the file has not changed. This is done by calculating +a checksum for the dataset that will change if the dataset changes. 
The default algorithm +used to calculate the checksum is md5 hash, but all the algorithms supported by imported +digest function are applicable. See the digest function documentation for more details. + +This function takes the whole dataset as an input. This means that everything to do with +the data is used when calculating the fixity checksum, so for example, if you change +the language of the output the checksum will also change. +} +\seealso{ +[digest::digest()] + +For more info on fixity checksum see: +\url{https://www.dpconline.org/handbook/technical-solutions-and-tools/fixity-and-checksums} +} +\keyword{internal} diff --git a/man/sotkanet_indicators.Rd b/man/sotkanet_indicators.Rd new file mode 100644 index 0000000..1ddf257 --- /dev/null +++ b/man/sotkanet_indicators.Rd @@ -0,0 +1,53 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_indicators.R +\name{sotkanet_indicators} +\alias{sotkanet_indicators} +\title{Sotkanet Indicators} +\usage{ +sotkanet_indicators( + id = NULL, + type = "table", + lang = "fi", + user.agent = NULL, + cache = TRUE, + cache_dir = NULL +) +} +\arguments{ +\item{id}{Dataset identifier. Default is NULL returning all} + +\item{type}{type output format, either 'table' (default) or 'raw'. +Default produces a truncated table with strictly defined columns that +are useful in other functions. 'Raw' produces the full output which might +be useful for exploratory purposes.} + +\item{lang}{Language of the output.} + +\item{user.agent}{"User agent" defined by the user. Default is NULL which +will then use the package identifier "rOpenGov/sotkanet"} + +\item{cache}{a logical whether to do caching.} + +\item{cache_dir}{a path to the cache dir.} +} +\value{ +data.frame (type = "table) or a list (type = "raw") +} +\description{ +Retrieve Sotkanet indicator metadata +} +\details{ +Data is fetched from \url{https://sotkanet.fi/rest/1.1/indicators}. +} +\examples{ +\dontrun{ +sotkanet.indicators <- sotkanet_indicators(type = "table", lang = "fi") +} +} +\references{ +See citation("sotkanet") +} +\author{ +Leo Lahti \email{leo.lahti@iki.fi}, Pyry Kantanen +} +\keyword{utilities} diff --git a/man/sotkanet_interactive.Rd b/man/sotkanet_interactive.Rd new file mode 100644 index 0000000..2bec676 --- /dev/null +++ b/man/sotkanet_interactive.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_interactive.R +\name{sotkanet_interactive} +\alias{sotkanet_interactive} +\title{Interactive function for get_sotkanet} +\usage{ +sotkanet_interactive(code = NULL) +} +\arguments{ +\item{code}{A unique identifier for the dataset of interest.} +} +\description{ +A simple interactive function, that helps with downloading Sotkanet data +} +\seealso{ +[get_sotkanet()] +} diff --git a/man/sotkanet_read_cache.Rd b/man/sotkanet_read_cache.Rd new file mode 100644 index 0000000..7e7536b --- /dev/null +++ b/man/sotkanet_read_cache.Rd @@ -0,0 +1,22 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_read_cache.R +\name{sotkanet_read_cache} +\alias{sotkanet_read_cache} +\title{Read cache for sotkanet data.frame} +\usage{ +sotkanet_read_cache(cache, cache_dir, query_hash) +} +\arguments{ +\item{cache}{a logical whether to do caching.} + +\item{cache_dir}{a path to cache directory.} + +\item{query_hash}{a character used to identify the data.frame.} +} +\description{ +Helper function that reads the cache for saved sotkanet data.frame. 
+} +\references{ +See citation("sotkanet") +} +\keyword{internal} diff --git a/man/sotkanet_regions.Rd b/man/sotkanet_regions.Rd new file mode 100644 index 0000000..7c3f2c9 --- /dev/null +++ b/man/sotkanet_regions.Rd @@ -0,0 +1,50 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_regions.R +\name{sotkanet_regions} +\alias{sotkanet_regions} +\title{Sotkanet Regions} +\usage{ +sotkanet_regions( + type = "table", + lang = "fi", + user.agent = NULL, + cache = TRUE, + cache_dir = NULL +) +} +\arguments{ +\item{type}{type output format, either 'table' (default) or 'raw'. +Default produces a truncated table with strictly defined columns that +are useful in other functions. 'Raw' produces the full output which might +be useful for exploratory purposes.} + +\item{lang}{Language of the output.} + +\item{user.agent}{"User agent" defined by the user. Default is NULL which +will then use the package identifier "rOpenGov/sotkanet"} + +\item{cache}{a logical whether to do caching.} + +\item{cache_dir}{a path to the cache directory.} +} +\value{ +data.frame +} +\description{ +Retrieves sotkanet regions data. +} +\details{ +Data is fetched from \url{https://sotkanet.fi/rest/1.1/regions}. +} +\examples{ +\dontrun{ +sotkanet.regions <- sotkanet_regions(type = "table", lang = "fi") +} +} +\references{ +See citation("sotkanet") +} +\author{ +Maintainer: Leo Lahti \email{leo.lahti@iki.fi} +} +\keyword{utilities} diff --git a/man/sotkanet_write_cache.Rd b/man/sotkanet_write_cache.Rd new file mode 100644 index 0000000..c5726ec --- /dev/null +++ b/man/sotkanet_write_cache.Rd @@ -0,0 +1,24 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/sotkanet_write_cache.R +\name{sotkanet_write_cache} +\alias{sotkanet_write_cache} +\title{Write sotkanet data.frame into cache} +\usage{ +sotkanet_write_cache(cache, cache_dir, query_hash, data) +} +\arguments{ +\item{cache}{a logical whether to do caching.} + +\item{cache_dir}{a path to cache directory.} + +\item{query_hash}{a character used to identify the data.frame.} + +\item{data}{sotkanet data.frame.} +} +\description{ +Helper function that writes the sotkanet data.frame into cache. +} +\references{ +See citation("sotkanet") +} +\keyword{internal} diff --git a/man/write_frictionless_metadata.Rd b/man/write_frictionless_metadata.Rd new file mode 100644 index 0000000..65e8e3a --- /dev/null +++ b/man/write_frictionless_metadata.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/write_frictionless_metadata.R +\name{write_frictionless_metadata} +\alias{write_frictionless_metadata} +\title{Writes the sotkanet data.frame in datapackage form} +\usage{ +write_frictionless_metadata(indicators, data) +} +\arguments{ +\item{indicators}{Dataset identifier} + +\item{data}{The sotkanet data.frame object} +} +\value{ +datapackage +} +\description{ +A function that turns the data.frame into a datapackage with the metadata added in. 
+} +\references{ +See citation("sotkanet") +} +\keyword{internal} diff --git a/tests/testthat/test-SotkanetCite.R b/tests/testthat/test-SotkanetCite.R index 01831ae..2932298 100644 --- a/tests/testthat/test-SotkanetCite.R +++ b/tests/testthat/test-SotkanetCite.R @@ -1,8 +1,8 @@ -test_that("SotkanetCite gives correct results", { +test_that("sotkanet_cite gives correct results", { expect_equal( class( - SotkanetCite( + sotkanet_cite( id = 10013, lang = "en" )), @@ -11,7 +11,7 @@ test_that("SotkanetCite gives correct results", { expect_equal( class( - SotkanetCite( + sotkanet_cite( id = 10012, lang = "fi" )), @@ -20,7 +20,7 @@ test_that("SotkanetCite gives correct results", { expect_equal( class( - SotkanetCite( + sotkanet_cite( id = 10011, lang = "sv" )), @@ -28,12 +28,12 @@ test_that("SotkanetCite gives correct results", { ) expect_error( - SotkanetCite(10013, lang = "ru" + sotkanet_cite(10013, lang = "ru" )) expect_equal( class( - SotkanetCite( + sotkanet_cite( id = 10013, format = "bibentry"))[2], "bibentry" @@ -41,14 +41,14 @@ test_that("SotkanetCite gives correct results", { expect_equal( class( - SotkanetCite( + sotkanet_cite( id = 10013, format = "bibtex")), "Bibtex" ) expect_warning( - SotkanetCite( + sotkanet_cite( id = 10013, format = "test" )