Merge branch 'develop' into 6919-preview-tools #6919
Showing 20 changed files with 558 additions and 59 deletions.
@@ -0,0 +1 @@
In the "Notes for Dataverse Installation Administrators" we should mention the new scripts for MDC.
@@ -0,0 +1,12 @@
## Google Cloud Archiver

Dataverse Bags can now be sent to a bucket in Google Cloud, including those in the 'Coldline' storage class, which provides less expensive but slower access.

## Use Cases

- As an Administrator, I can set up a regular export to Google Cloud so that my users' data is preserved.

## New Settings

:GoogleCloudProject - the name of the project managing the bucket.
:GoogleCloudBucket - the name of the bucket to use.
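Assuming these follow the usual :Setting convention for Dataverse database settings, they can be set through the admin settings API. A minimal sketch; the project and bucket names are placeholders, not values from this changeset:

curl -X PUT -d 'my-gcp-project' http://localhost:8080/api/admin/settings/:GoogleCloudProject
curl -X PUT -d 'my-archive-bucket' http://localhost:8080/api/admin/settings/:GoogleCloudBucket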
@@ -0,0 +1,36 @@
#!/bin/bash

COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.0.1"
MDC_LOG_DIRECTORY="/usr/local/payara5/glassfish/domains/domain1/logs/mdc"

# counter_daily.sh

cd "$COUNTER_PROCESSOR_DIRECTORY" || exit 1

echo >>/tmp/counter_daily.log
date >>/tmp/counter_daily.log
echo >>/tmp/counter_daily.log

# "You should run Counter Processor once a day to create reports in SUSHI (JSON) format that are saved to disk for Dataverse to process and that are sent to the DataCite hub."

LAST=$(date -d "yesterday 13:00" '+%Y-%m-%d')
YEAR_MONTH=$(date -d "yesterday 13:00" '+%Y-%m')
d=$(date -I -d "$YEAR_MONTH-01")

# Make sure an MDC log file exists for every day of the month up to yesterday,
# so counter-processor does not fail on missing input files.
while [ "$(date -d "$d" +%Y%m%d)" -le "$(date -d "$LAST" +%Y%m%d)" ]; do
  if [ ! -f "$MDC_LOG_DIRECTORY/counter_$d.log" ]; then
    touch "$MDC_LOG_DIRECTORY/counter_$d.log"
  fi
  d=$(date -I -d "$d + 1 day")
done

# Run counter-processor as the counter user
sudo -u counter YEAR_MONTH=$YEAR_MONTH python3 main.py >>/tmp/counter_daily.log

# Have Dataverse ingest the SUSHI report that counter-processor wrote to disk
curl -X POST "http://localhost:8080/api/admin/makeDataCount/addUsageMetricsFromSushiReport?reportOnDisk=/tmp/make-data-count-report.json"
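This script is meant to run once a day, typically from cron; the entry below is only an illustrative sketch, and the script path and run time are assumptions rather than part of this changeset:

# Illustrative crontab entry: run the daily Make Data Count processing shortly after 1am
0 1 * * * /usr/local/bin/counter_daily.sh >> /tmp/counter_daily_cron.log 2>&1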
@@ -0,0 +1,48 @@
#!/bin/sh
# counter_weekly.sh

# This script iterates through all published Datasets in all Dataverses and calls the Make Data Count API to update their citations from DataCite.
# Note: requires curl and jq for parsing the JSON responses from curl.

# A recursive function to process each Dataverse
processDV () {
  echo "Processing Dataverse ID#: $1"

  # Call the Dataverse API to get the contents of the Dataverse (without credentials, this will only list published datasets and dataverses)
  DVCONTENTS=$(curl -s "http://localhost:8080/api/dataverses/$1/contents")

  # Iterate over all datasets, pulling the value of their DOIs (as part of the persistentUrl) from the JSON returned
  for subds in $(echo "${DVCONTENTS}" | jq -r '.data[] | select(.type == "dataset") | .persistentUrl'); do

    # The authority/identifier are preceded by a protocol/host, i.e. https://doi.org/
    DOI=$(expr "$subds" : '.*doi\.org/\(.*\)')

    # Call the Dataverse API for this dataset and get the response
    RESULT=$(curl -s -X POST "http://localhost:8080/api/admin/makeDataCount/:persistentId/updateCitationsForDataset?persistentId=doi:$DOI")
    # Parse the status and number of citations found from the response
    STATUS=$(echo "$RESULT" | jq -j '.status')
    CITATIONS=$(echo "$RESULT" | jq -j '.data.citationCount')

    # The status for a call that worked
    OK='OK'

    # Check the status and report
    if [ "$STATUS" = "$OK" ]; then
      echo "Updated: $CITATIONS citations for doi:$DOI"
    else
      echo "Failed to update citations for doi:$DOI"
      echo "Run curl -s -X POST 'http://localhost:8080/api/admin/makeDataCount/:persistentId/updateCitationsForDataset?persistentId=doi:$DOI' to retry/see the error message"
    fi
  done

  # Now iterate over any child Dataverses and recursively process them
  for subdv in $(echo "${DVCONTENTS}" | jq -r '.data[] | select(.type == "dataverse") | .id'); do
    echo "$subdv"
    processDV "$subdv"
  done
}

# Call the function on the root dataverse to start processing
processDV 1
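As with the daily script, this would normally be run on a schedule; an illustrative crontab sketch, in which the path and timing are assumptions:

# Illustrative crontab entry: refresh citation counts from DataCite early every Sunday
0 2 * * 0 /usr/local/bin/counter_weekly.sh >> /tmp/counter_weekly_cron.log 2>&1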