From 404b72098c7deac58ffdb4ae31e92d3c3abce580 Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 10:57:31 +0100
Subject: [PATCH 1/9] Update upload_direct_to_UAT.R

---
 src/upload_direct_to_UAT.R | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/upload_direct_to_UAT.R b/src/upload_direct_to_UAT.R
index f95d4c1f..d9308f1c 100644
--- a/src/upload_direct_to_UAT.R
+++ b/src/upload_direct_to_UAT.R
@@ -7,12 +7,14 @@
 #' needed to access the bucket.
 
 # Libraries ####
+print("libraries")
 library(aws.s3)
 library(testthat)
 
 directFilePath <- "./data/output/UAT_direct"
 
 # connect to bucket ####
+print("source connect_to_bucket.R")
 source("./src/connect_to_bucket.R")
 
 # get feedback ####
@@ -20,6 +22,7 @@ source("./src/connect_to_bucket.R")
 bucket <- Sys.getenv("S3_bucket")
 # bucket <- config::get("bucket", file = system.file("config.yml", package = "alienSpecies"))
 
+print("get_bucket_df")
 bucket_df <- get_bucket_df(bucket, region = "eu-west-1")
 # test uploaded files ####
 # A placeholder for a alienSpecies function to test the files on the bucket.
@@ -27,7 +30,7 @@ bucket_df <- get_bucket_df(bucket, region = "eu-west-1")
 # files that are currently in management needs to be uploaded to the bucket
 directFiles <- c("Oxyura_jamaicensis.csv", "Lithobates_catesbeianus.csv",
                  "Ondatra_zibethicus.csv", "translations.csv")
-
+print("lapply put_object")
 lapply(directFiles, function(fileName){
   put_object(file.path(directFilePath, fileName),

From 271974f1e410a1b3720faaf944d921267e42a70c Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 11:50:54 +0100
Subject: [PATCH 2/9] disable connect_to_bucket.R

---
 src/upload_direct_to_UAT.R | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/upload_direct_to_UAT.R b/src/upload_direct_to_UAT.R
index d9308f1c..b38684bb 100644
--- a/src/upload_direct_to_UAT.R
+++ b/src/upload_direct_to_UAT.R
@@ -14,8 +14,9 @@ library(testthat)
 directFilePath <- "./data/output/UAT_direct"
 
 # connect to bucket ####
-print("source connect_to_bucket.R")
-source("./src/connect_to_bucket.R")
+# run this code when you run this script locally
+# print("source connect_to_bucket.R")
+# source("./src/connect_to_bucket.R")
 
 # get feedback ####

From ea0f6fa271edec20070fb96197a78faeb6484989 Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 13:18:47 +0100
Subject: [PATCH 3/9] add s3:// to bucket variable

---
 src/upload_direct_to_UAT.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/upload_direct_to_UAT.R b/src/upload_direct_to_UAT.R
index b38684bb..a224d306 100644
--- a/src/upload_direct_to_UAT.R
+++ b/src/upload_direct_to_UAT.R
@@ -20,7 +20,7 @@ directFilePath <- "./data/output/UAT_direct"
 
 # get feedback ####
 
-bucket <- Sys.getenv("S3_bucket")
+bucket <- paste0("s3://",Sys.getenv("S3_bucket"))
 # bucket <- config::get("bucket", file = system.file("config.yml", package = "alienSpecies"))
 
 print("get_bucket_df")

From 2ec7870d9c09a486501b95bc3775db9ee45bf3da Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 13:58:32 +0100
Subject: [PATCH 4/9] add test

---
 src/upload_direct_to_UAT.R | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/upload_direct_to_UAT.R b/src/upload_direct_to_UAT.R
index a224d306..1367d428 100644
--- a/src/upload_direct_to_UAT.R
+++ b/src/upload_direct_to_UAT.R
@@ -17,9 +17,15 @@ directFilePath <- "./data/output/UAT_direct"
 # run this code when you run this script locally
 # print("source connect_to_bucket.R")
 # source("./src/connect_to_bucket.R")
-
+# connect_to_bucket(Sys.getenv("UAT_BUCKET"))
 # get feedback ####
 
+# test S3_bucket ####
+print("test S3_bucket")
+if(Sys.getenv("S3_bucket") == ""){
+  stop("S3_bucket is not provided")
+}
+
 bucket <- paste0("s3://",Sys.getenv("S3_bucket"))
 # bucket <- config::get("bucket", file = system.file("config.yml", package = "alienSpecies"))
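Patches 4 and 5 iterate on the same guard: patch 4 reads Sys.getenv("S3_bucket") while the workflow exports the secret as S3_BUCKET, a casing mismatch that patch 5 below corrects. A more general form of that guard might look like the following sketch; require_env is a hypothetical helper, not part of the repository.

# Hypothetical helper, not in the repository: fail fast when any required
# environment variable is unset or empty, reporting all missing names at once.
require_env <- function(vars) {
  missing <- vars[!nzchar(Sys.getenv(vars))]
  if (length(missing) > 0) {
    stop("Missing environment variable(s): ", paste(missing, collapse = ", "))
  }
}

require_env(c("S3_BUCKET", "AWS_DEFAULT_REGION"))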
source("./src/connect_to_bucket.R") - +# connect_to_bucket(Sys.getenv("UAT_BUCKET")) # get feedback #### +# test S3_bucket #### +print("test S3_bucket") +if(Sys.getenv("S3_bucket") == ""){ + stop("S3_bucket is not provided") +} + bucket <- paste0("s3://",Sys.getenv("S3_bucket")) # bucket <- config::get("bucket", file = system.file("config.yml", package = "alienSpecies")) From 6b573afa28382c7d86d3b2a910ef04174b9dee00 Mon Sep 17 00:00:00 2001 From: Sander Devisscher Date: Tue, 2 Jan 2024 14:27:57 +0100 Subject: [PATCH 5/9] S3_BUCKET instead of S3_bucket --- src/upload_direct_to_UAT.R | 4 ++-- src/upload_processing_to_UAT.R | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/upload_direct_to_UAT.R b/src/upload_direct_to_UAT.R index 1367d428..e1a0d55d 100644 --- a/src/upload_direct_to_UAT.R +++ b/src/upload_direct_to_UAT.R @@ -22,11 +22,11 @@ directFilePath <- "./data/output/UAT_direct" # test S3_bucket #### print("test S3_bucket") -if(Sys.getenv("S3_bucket") == ""){ +if(Sys.getenv("S3_BUCKET") == ""){ stop("S3_bucket is not provided") } -bucket <- paste0("s3://",Sys.getenv("S3_bucket")) +bucket <- paste0("s3://",Sys.getenv("S3_BUCKET")) # bucket <- config::get("bucket", file = system.file("config.yml", package = "alienSpecies")) print("get_bucket_df") diff --git a/src/upload_processing_to_UAT.R b/src/upload_processing_to_UAT.R index f78f97f9..84961af3 100644 --- a/src/upload_processing_to_UAT.R +++ b/src/upload_processing_to_UAT.R @@ -20,7 +20,7 @@ processingFilePath <- "./data/output/UAT_processing" # connect to bucket #### source("./src/connect_to_bucket.R") -bucket <- Sys.getenv("UAT_bucket") +bucket <- Sys.getenv("S3_BUCKET") Sys.setenv("AWS_DEFAULT_REGION" = "eu-west-1") connect_to_bucket(bucket) From 064af4c215e5761da0a69739efce034b93b9f1bc Mon Sep 17 00:00:00 2001 From: Sander Devisscher Date: Tue, 2 Jan 2024 14:58:14 +0100 Subject: [PATCH 6/9] drop unnecessary steps --- .github/workflows/upload_files_direct.yaml | 25 +----------------- .../workflows/upload_files_processing.yaml | 26 ++----------------- 2 files changed, 3 insertions(+), 48 deletions(-) diff --git a/.github/workflows/upload_files_direct.yaml b/.github/workflows/upload_files_direct.yaml index af78105d..84187fd2 100644 --- a/.github/workflows/upload_files_direct.yaml +++ b/.github/workflows/upload_files_direct.yaml @@ -89,27 +89,4 @@ jobs: AWS_SESSION_TOKEN: ${{ steps.assume_role.outputs.AWS_SESSION_TOKEN }} AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} S3_BUCKET: ${{ secrets.S3_BUCKET }} - - - name: Commit and push changes - uses: devops-infra/action-commit-push@master - with: - github_token: ${{ secrets.AUTOMATISATION }} - commit_prefix: "[AUTO]" - commit_message: "upload files direct to UAT" - target_branch: upload files to UAT - add_timestamp: true - - - name: Get branch name - run: | - git branch --show-current - - - name: Create pull request - uses: devops-infra/action-pull-request@v0.4.2 - with: - github_token: ${{ secrets.AUTOMATISATION }} - target_branch: uat - title: "[AUTO] upload files direct to UAT" - template: .github/PR_upload_files_to_UAT.md - reviewer: SanderDevisscher - label: automated workflow - get_diff: false + \ No newline at end of file diff --git a/.github/workflows/upload_files_processing.yaml b/.github/workflows/upload_files_processing.yaml index 2d03ec30..bd2d90fa 100644 --- a/.github/workflows/upload_files_processing.yaml +++ b/.github/workflows/upload_files_processing.yaml @@ -86,28 +86,6 @@ jobs: AWS_ACCESS_KEY_ID: ${{ steps.assume_role.outputs.AWS_ACCESS_KEY_ID 
From 293fa9e128c94f3ec8eab58a5841734695d8ac7a Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 15:35:58 +0100
Subject: [PATCH 7/9] disable connect_to_bucket

---
 src/upload_processing_to_UAT.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/upload_processing_to_UAT.R b/src/upload_processing_to_UAT.R
index 84961af3..948aa6d7 100644
--- a/src/upload_processing_to_UAT.R
+++ b/src/upload_processing_to_UAT.R
@@ -23,7 +23,7 @@ source("./src/connect_to_bucket.R")
 bucket <- Sys.getenv("S3_BUCKET")
 Sys.setenv("AWS_DEFAULT_REGION" = "eu-west-1")
 
-connect_to_bucket(bucket)
+#connect_to_bucket(bucket) #=> run this before continuing locally
 
 ###############################################################
 ## The following create* function will take input data,

From 5cd12178caddcd74a5b017eefdf5fbdbb1a78cba Mon Sep 17 00:00:00 2001
From: Sander Devisscher
Date: Tue, 2 Jan 2024 17:49:21 +0100
Subject: [PATCH 8/9] change alienspecies branch => change to uat when
 sprint_v0.0.4 is merged

---
 .github/workflows/upload_files_processing.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/upload_files_processing.yaml b/.github/workflows/upload_files_processing.yaml
index bd2d90fa..c97f7eae 100644
--- a/.github/workflows/upload_files_processing.yaml
+++ b/.github/workflows/upload_files_processing.yaml
@@ -52,7 +52,7 @@ jobs:
         sudo apt install --yes libharfbuzz-dev libfribidi-dev
         R --no-save -e 'install.packages("devtools")'
         R --no-save -e 'devtools::install_github("inbo/INBOtheme@v0.5.9", force = TRUE)'
-        R --no-save -e 'devtools::install_github("inbo/alien-species-portal@main",
+        R --no-save -e 'devtools::install_github("inbo/alien-species-portal@sprint_v0.0.4",
                         subdir = "alienSpecies", force = TRUE)'
 
     - name: Install R packages
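Patch 9 below adds print() markers before each create* call so the Actions log shows which step was running when a failure occurs. A reusable variant that also names the failing step in the error message could look like this sketch; run_step is a hypothetical wrapper, not part of the series.

# Hypothetical wrapper, not in the repository: print a label before each step
# and attach that label to any error raised while the step runs.
run_step <- function(label, expr) {
  print(label)
  tryCatch(expr, error = function(e) {
    stop("Step '", label, "' failed: ", conditionMessage(e))
  })
}

# For example, instead of print("grid") followed by a bare call:
# run_step("grid", createShapeData(dataDir = file.path(processingFilePath, "grid"),
#                                  bucket = bucket))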
stop("S3_bucket is not provided") +} + +bucket <- paste0("s3://",Sys.getenv("S3_BUCKET")) +#Sys.setenv("AWS_DEFAULT_REGION" = "eu-west-1") #connect_to_bucket(bucket) #=> run this before continuing locally @@ -33,45 +38,56 @@ Sys.setenv("AWS_DEFAULT_REGION" = "eu-west-1") # input: folder grid containing gewestbel shape data # output: grid.RData +print("grid") createShapeData(dataDir = file.path(processingFilePath, "grid"), bucket = bucket) # input Vespa_velutina_shape" folder containing shape data # output: Vespa_velutina_shape.RData +print("Vespa velutina") createShapeData(dataDir = file.path(processingFilePath,"Vespa_velutina_shape"), bucket = bucket) # input: folder occurrenceCube containing be_1km and be_20 km shape data # output: occurrenceCube.RData +print("occurrenceCube") createShapeData(dataDir = file.path(processingFilePath,"occurrenceCube"), bucket = bucket) # output: provinces.RData +print("provinces") createShapeData(dataDir = file.path(processingFilePath,"provinces.geojson"), bucket = bucket) # output: communes.RData +print("communes") createShapeData(dataDir = file.path(processingFilePath,"communes.geojson"), bucket = bucket) # create key data # input: "be_alientaxa_info.csv" # output: "keys.csv" +print("key data") createKeyData(dataDir = processingFilePath, bucket = bucket) # create occupancy cube # input: trendOccupancy folder containing T1* and ias_belgium_t0_2016/18/20 geojson data # output: dfCube.RData +print("dfcube") createOccupancyCube(file.path(processingFilePath, "trendOccupancy"), bucket = bucket) # create tabular data # input: data_input_checklist_indicators.tsv/eu_concern_species.tsv/be_alientaxa_cube.csv # output: "eu_concern_species_processed.RData"/"data_input_checklist_indicators_processed.RData"/ "be_alientaxa_cube_processed.RData" - +print("tabular data") +print("indicators") createTabularData(dataDir = processingFilePath, type = "indicators", bucket = bucket) +print("unionlist") createTabularData(dataDir = processingFilePath, type = "unionlist", bucket = bucket) +print("occurrence") createTabularData(dataDir = processingFilePath, type = "occurrence", bucket = bucket) ################################################### # test if all the data files needed are on bucket # # and can be read into R # ################################################### +print("tests") test_that("Load shape data", {