diff --git a/Vagrantfile b/Vagrantfile index f846d826726..689342b8a8d 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -11,7 +11,7 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| puts "https://github.com/IQSS/dataverse/issues/6849" puts puts "You can also try the Vagrant environment at" - puts "https://github.com/IQSS/dataverse-ansible" + puts "https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible" exit 1 config.vm.provider "virtualbox" do |vbox| diff --git a/doc/release-notes/4.20-release-notes b/doc/release-notes/4.20-release-notes.md similarity index 100% rename from doc/release-notes/4.20-release-notes rename to doc/release-notes/4.20-release-notes.md diff --git a/doc/release-notes/6938-analytics-reload b/doc/release-notes/6938-analytics-reload new file mode 100644 index 00000000000..40afb847a21 --- /dev/null +++ b/doc/release-notes/6938-analytics-reload @@ -0,0 +1,5 @@ +### Custom Analytics Code Changes + +You should update your custom analytics code to implement necessary changes for tracking updated dataset and file buttons. There was also a fix to the analytics code that will now properly track downloads for tabular files. + +We have updated the documentation and sample analytics code snippet provided in [Installation Guide > Configuration > Web Analytics Code](http://guides.dataverse.org/en/latest/installation/config.html#web-analytics-code) to reflect the changes implemented in this version (#6938/#6684). \ No newline at end of file diff --git a/doc/release-notes/7040-project-moves.md b/doc/release-notes/7040-project-moves.md new file mode 100644 index 00000000000..3f4a5c3ed50 --- /dev/null +++ b/doc/release-notes/7040-project-moves.md @@ -0,0 +1,8 @@ +## Notes for Dataverse Installation Administrators + +### Location Changes for Related Projects + +The dataverse-ansible and dataverse-previewers repositories have been moved to the GDCC Organization on GitHub. 
If you have been referencing the dataverse-ansible repository from IQSS and the dataverse-previewers from QDR, please instead use them from their new locations: + + + diff --git a/doc/sphinx-guides/source/_static/docsdataverse_org.css b/doc/sphinx-guides/source/_static/docsdataverse_org.css index 7ed784e91e0..e8de49c89aa 100755 --- a/doc/sphinx-guides/source/_static/docsdataverse_org.css +++ b/doc/sphinx-guides/source/_static/docsdataverse_org.css @@ -103,6 +103,67 @@ div.code-example .messagePanel .glyphicon { div.code-example span.glyphicon-question-sign.tooltip-icon {color: #99bcdb !important;} div.code-example span.glyphicon-question-sign.tooltip-icon:hover {color: #337AB7 !important;} +div.code-example .btn-group .btn-link {font-size:2em !important;} +div.code-example .btn-group .btn.btn-link.bootstrap-button-tooltip span.caret {margin-left: 6px;} + +div.code-example .btn.btn-default.btn-access {background-color: #fff;background-image:none;} +div.code-example .btn.btn-default.btn-access:focus, div.code-example .btn.btn-default.btn-access:hover, div.code-example .btn.btn-default.btn-access:active {background-color: #e6e6e6;background-image:none;} + +/* Dropdown submenu */ +div.code-example .dropdown-submenu { + position: relative; +} +div.code-example .dropdown-submenu.pull-left { + width: 100%; +} +div.code-example .dropdown-submenu>.dropdown-menu { + top: 0; + left: 100%; + margin-top: -6px; + margin-left: -1px; + -webkit-border-radius: 0 6px 6px 6px; + -moz-border-radius: 0 6px 6px; + border-radius: 0 6px 6px 6px; +} +div.code-example .dropdown-submenu.pull-left>.dropdown-menu { + float: right; + min-width: 100%; +} + +div.code-example .dropdown-submenu:hover>.dropdown-menu { + display: block; +} + +div.code-example .dropdown-submenu>a:after { + display: block; + content: " "; + float: right; + width: 0; + height: 0; + border-color: transparent; + border-style: solid; + border-width: 5px 0 5px 5px; + border-left-color: #ccc; + margin-top: 5px; + 
margin-right: -10px; +} + +div.code-example .dropdown-submenu:hover>a:after { + border-left-color: #fff; +} + +div.code-example .dropdown-submenu.pull-left { + float: none; +} + +div.code-example .dropdown-submenu.pull-left>.dropdown-menu { + left: -100%; + margin-left: 10px; + -webkit-border-radius: 6px 0 6px 6px; + -moz-border-radius: 6px 0 6px 6px; + border-radius: 6px 0 6px 6px; +} + #breadcrumbNavBlock {margin-top:.5em;} #breadcrumbNavBlock .breadcrumbBlock {float:left; position:relative;} #breadcrumbNavBlock .breadcrumbActive {font-weight:bold;} diff --git a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html index ca703dddf11..bd8e615349f 100644 --- a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html +++ b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html @@ -10,7 +10,7 @@ function enableAnalyticsEventCapture() { // Download button - $(document).on("click", ".btn-download", function() { + $(document).on("click", ".btn-download, .btn-download.dropdown-toggle + ul.dropdown-menu li a", function() { var category = $(this).text(); var label = getFileId($(this)); gtag('event', 'Download',{'event_category' : category, @@ -26,7 +26,7 @@ }); // Explore button - $(document).on("click", ".btn-explore + ul.dropdown-menu li a", function() { + $(document).on("click", ".btn-explore, .btn-explore.dropdown-toggle + ul.dropdown-menu li a", function() { var category = $(this).text(); var label = getFileId($(this)); gtag('event', 'Explore', {'event_category' : category, @@ -34,7 +34,7 @@ }); // Compute button - $(document).on("click", ".btn-compute", function() { + $(document).on("click", ".btn-compute, .btn-compute.dropdown-toggle + ul.dropdown-menu li a", function() { var category = $(this).text(); var label = getFileId($(this)); 
gtag('event', 'Compute', {'event_category' : category, @@ -42,7 +42,7 @@ }); // Preview button - $(document).on("click", ".btn-preview", function() { + $(document).on("click", ".btn-preview, .btn-preview.dropdown-toggle + ul.dropdown-menu li a", function() { var category = $(this).text(); var label = getFileId($(this)); gtag('event', 'Preview', {'event_category' : category, @@ -110,7 +110,7 @@ function getFileId(target) { var label = 'Unknown'; - if(target.parents('th').length>0) { + if(target.parents('th, #actionButtonBlock .btn-access-dataset + ul.dropdown-menu').length>0) { //Special case - the Download button that downloads all selected files in the dataset label = 'file(s) from ' + stripId($('#datasetForm').attr('action')); } else { diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst index 6ff611cb55f..55733ffd99a 100755 --- a/doc/sphinx-guides/source/admin/index.rst +++ b/doc/sphinx-guides/source/admin/index.rst @@ -26,6 +26,7 @@ This guide documents the functionality only available to superusers (such as "da dataverses-datasets solr-search-index ip-groups + mail-groups monitoring reporting-tools-and-queries maintenance diff --git a/doc/sphinx-guides/source/admin/mail-groups.rst b/doc/sphinx-guides/source/admin/mail-groups.rst new file mode 100644 index 00000000000..a7d15af52b4 --- /dev/null +++ b/doc/sphinx-guides/source/admin/mail-groups.rst @@ -0,0 +1,82 @@ +Mail Domain Groups +================== + +Groups can be defined based on the domain part of users' (verified) email addresses. Email addresses that match +one or more groups' configuration will add the user to them. + +Within the scientific community, in many cases users will use an institutional email address for their account in a +Dataverse installation. This might offer a simple solution for building groups of people, as the domain part can be +seen as a selector for group membership.
+ +Some use cases: installations that would like to avoid Shibboleth, enable self sign up, offer multi-tenancy or can't use +:doc:`ip-groups` plus many more. + +.. hint:: Please be aware that non-verified mail addresses will exclude the user even if matching. This is to avoid + privilege escalation. + +Listing Mail Domain Groups +-------------------------- + +Mail Domain Groups can be listed with the following curl command: + +``curl http://localhost:8080/api/admin/groups/domain`` + +Listing a specific Mail Domain Group +------------------------------------ + +Let's say you used "domainGroup1" as the alias of the Mail Domain Group you created below. +To list just that Mail Domain Group, you can include the alias in the curl command like this: + +``curl http://localhost:8080/api/admin/groups/domain/domainGroup1`` + + +Creating a Mail Domain Group +---------------------------- + +Mail Domain Groups can be created with a simple JSON file: + +.. code-block:: json + :caption: domainGroup1.json + :name: domainGroup1.json + + { + "name": "Users from @example.org", + "alias": "exampleorg", + "description": "Any verified user from Example Org will be included in this group.", + "domains": ["example.org"] + } + +Giving a ``description`` is optional. The ``name`` will be visible in the permission UI, so be sure to pick a sensible +value. + +The ``domains`` field must be an array. This enables creation of multi-domain groups, too. + +Obviously you can create as many of these groups as you might like, as long as the ``alias`` is unique. + +To load it into your Dataverse installation, either use a ``POST`` or ``PUT`` request (see below): + +``curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/groups/domain --upload-file domainGroup1.json`` + +Updating a Mail Domain Group +---------------------------- + +Editing a group is done by replacing it.
Grab your group definition like the :ref:`above example `, +change it as you like and ``PUT`` it into your installation: + +``curl -X PUT -H 'Content-type: application/json' http://localhost:8080/api/admin/groups/domain/domainGroup1 --upload-file domainGroup1.json`` + +Please make sure that the alias of the group you want to change is included in the path. You also need to ensure +that this alias matches with the one given in your JSON file. + +.. hint:: This is an idempotent call, so it will create the group given if not present. + +Deleting a Mail Domain Group +---------------------------- + +To delete a Mail Domain Group with an alias of "domainGroup1", use the curl command below: + +``curl -X DELETE http://localhost:8080/api/admin/groups/domain/domainGroup1`` + +Please note: it is not recommended to delete a Mail Domain Group that has been assigned roles. If you want to delete +a Mail Domain Group, you should first remove its permissions. + diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst index 8773a1c6d38..0c752924b30 100644 --- a/doc/sphinx-guides/source/admin/troubleshooting.rst +++ b/doc/sphinx-guides/source/admin/troubleshooting.rst @@ -20,7 +20,7 @@ There are several types of dataset locks. Locks can be managed using the locks A It's normal for the ingest process described in the :doc:`/user/tabulardataingest/ingestprocess` section of the User Guide to take some time but if hours or days have passed and the dataset is still locked, you might want to inspect the locks and consider deleting some or all of them. It is recommended to restart the application server if you are deleting an ingest lock, to make sure the ingest job is no longer running in the background. Ingest locks are idetified by the label ``Ingest`` in the ``reason`` column of the ``DatasetLock`` table in the database. 
-A dataset is locked with a lock of type ``finalizePublication`` while the persistent identifiers for the datafiles in the dataset are registered or updated, and/or while the physical files are being validated by recalculating the checksums and verifying them against the values stored in the database, before the publication process can be completed (Note that either of the two tasks can be disabled via database options - see :doc:`config`). If a dataset has been in this state for a long period of time, for hours or longer, it is somewhat safe to assume that it is stuck (for example, the process may have been interrupted by an application server restart, or a system crash), so you may want to remove the lock (to be safe, do restart the application server, to ensure that the job is no longer running in the background) and advise the user to try publishing again. See :doc:`dataverses-datasets` for more information on publishing. +A dataset is locked with a lock of type ``finalizePublication`` while the persistent identifiers for the datafiles in the dataset are registered or updated, and/or while the physical files are being validated by recalculating the checksums and verifying them against the values stored in the database, before the publication process can be completed (Note that either of the two tasks can be disabled via database options - see :doc:`/installation/config`). If a dataset has been in this state for a long period of time, for hours or longer, it is somewhat safe to assume that it is stuck (for example, the process may have been interrupted by an application server restart, or a system crash), so you may want to remove the lock (to be safe, do restart the application server, to ensure that the job is no longer running in the background) and advise the user to try publishing again. See :doc:`dataverses-datasets` for more information on publishing. 
If any files in the dataset fail the validation above the dataset will be left locked with a ``DatasetLock.Reason=FileValidationFailed``. The user will be notified that they need to contact their Dataverse support in order to address the issue before another attempt to publish can be made. The admin will have to address and fix the underlying problems (by either restoring the missing or corrupted files, or by purging the affected files from the dataset) before deleting the lock and advising the user to try to publish again. The goal of the validation framework is to catch these types of conditions while the dataset is still in DRAFT. diff --git a/doc/sphinx-guides/source/api/apps.rst b/doc/sphinx-guides/source/api/apps.rst index 6fca5891202..c1d3a0a5395 100755 --- a/doc/sphinx-guides/source/api/apps.rst +++ b/doc/sphinx-guides/source/api/apps.rst @@ -1,7 +1,7 @@ Apps ==== -The introduction of Dataverse APIs has fostered the development of a variety of software applications that are listed in the :doc:`/admin/integrations`, :doc:`/admin/external-tools`, and :doc:`/admin/reporting-tools` sections of the Admin Guide. +The introduction of Dataverse APIs has fostered the development of a variety of software applications that are listed in the :doc:`/admin/integrations`, :doc:`/admin/external-tools`, and :doc:`/admin/reporting-tools-and-queries` sections of the Admin Guide. The apps below are open source and demonstrate how to use Dataverse APIs. Some of these apps are built on :doc:`/api/client-libraries` that are available for Dataverse APIs in Python, Javascript, R, and Java. 
diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst index e122358144b..f9231f55359 100644 --- a/doc/sphinx-guides/source/api/external-tools.rst +++ b/doc/sphinx-guides/source/api/external-tools.rst @@ -163,7 +163,7 @@ If you've thought to yourself that there ought to be an app store for Dataverse Demoing Your External Tool ++++++++++++++++++++++++++ -https://demo.dataverse.org is the place to play around with Dataverse and your tool can be included. Please email support@dataverse.org to start the conversation about adding your tool. Additionally, you are welcome to open an issue at https://github.com/IQSS/dataverse-ansible which already includes a number of the tools listed above. +https://demo.dataverse.org is the place to play around with Dataverse and your tool can be included. Please email support@dataverse.org to start the conversation about adding your tool. Additionally, you are welcome to open an issue at https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible which already includes a number of the tools listed above. Announcing Your External Tool +++++++++++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst index ceb618df4dd..cc6b3c24673 100755 --- a/doc/sphinx-guides/source/developers/deployment.rst +++ b/doc/sphinx-guides/source/developers/deployment.rst @@ -73,7 +73,7 @@ If you are having trouble configuring the files manually as described above, see Configure Ansible File (Optional) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In order to configure Dataverse settings such as the password of the dataverseAdmin user, download https://raw.githubusercontent.com/IQSS/dataverse-ansible/master/defaults/main.yml and edit the file to your liking. 
+In order to configure Dataverse settings such as the password of the dataverseAdmin user, download https://raw.githubusercontent.com/GlobalDataverseCommunityConsortium/dataverse-ansible/master/defaults/main.yml and edit the file to your liking. You can skip this step if you're fine with the values in the "main.yml" file in the link above. @@ -82,9 +82,9 @@ Download and Run the "Create Instance" Script Once you have done the configuration above, you are ready to try running the "ec2-create-instance.sh" script to spin up Dataverse in AWS. -Download :download:`ec2-create-instance.sh` and put it somewhere reasonable. For the purpose of these instructions we'll assume it's in the "Downloads" directory in your home directory. +Download :download:`ec2-create-instance.sh` and put it somewhere reasonable. For the purpose of these instructions we'll assume it's in the "Downloads" directory in your home directory. -To run it with default values you just need the script, but you may also want a current copy of the ansible :download:`group vars`_ file. +To run it with default values you just need the script, but you may also want a current copy of the ansible :download:`group vars`_ file. ec2-create-instance accepts a number of command-line switches, including: diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 240f691aa2c..71a2e2eaa64 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -125,7 +125,7 @@ different people. For our purposes, an integration test can have to flavors: - Operate on an installation of Dataverse that is running and able to talk to both PostgreSQL and Solr. - Written using REST Assured. -2. Be a `Testcontainers `_ Test: +2. Be a `Testcontainers `__ Test: - Operates any dependencies via the Testcontainers API, using containers. - Written as a JUnit test, using all things necessary to test. 
@@ -137,10 +137,10 @@ Running the Full API Test Suite Using EC2 To run the API test suite in an EC2 instance you should first follow the steps in the :doc:`deployment` section to get set up for AWS in general and EC2 in particular. -You may always retrieve a current copy of the ec2-create-instance.sh script and accompanying group_var.yml file from the `dataverse-ansible repo`_: +You may always retrieve a current copy of the ec2-create-instance.sh script and accompanying group_var.yml file from the `dataverse-ansible repo`_: -- `ec2-create-instance.sh`_ -- `main.yml`_ +- `ec2-create-instance.sh`_ +- `main.yml`_ Edit ``main.yml`` to set the desired GitHub repo, branch, and to ensure that the API test suite is enabled: @@ -149,7 +149,7 @@ Edit ``main.yml`` to set the desired GitHub repo, branch, and to ensure that the - ``dataverse.api.test_suite: true`` - ``dataverse.sampledata.enabled: true`` -If you wish, you may pass the local path of a logging directory, which will tell ec2-create-instance.sh to `grab various logs `_ for your review. +If you wish, you may pass the local path of a logging directory, which will tell ec2-create-instance.sh to `grab various logs `_ for your review. Finally, run the script: @@ -157,7 +157,7 @@ Finally, run the script: $ ./ec2-create-instance.sh -g main.yml -l log_dir -Near the beginning and at the end of the ec2-create-instance.sh output you will see instructions for connecting to the instance via SSH. If you are actively working on a branch and want to refresh the warfile after each commit, you may wish to call a `redeploy.sh `_ script placed by the Ansible role, which will do a "git pull" against your branch, build the warfile, deploy the warfile, then restart the app server. By default this script is written to /tmp/dataverse/redeploy.sh. 
You may invoke the script by appending it to the SSH command in ec2-create's output: +Near the beginning and at the end of the ec2-create-instance.sh output you will see instructions for connecting to the instance via SSH. If you are actively working on a branch and want to refresh the warfile after each commit, you may wish to call a `redeploy.sh `_ script placed by the Ansible role, which will do a "git pull" against your branch, build the warfile, deploy the warfile, then restart the app server. By default this script is written to /tmp/dataverse/redeploy.sh. You may invoke the script by appending it to the SSH command in ec2-create's output: .. code-block:: bash @@ -258,7 +258,7 @@ Writing and Using a Testcontainers Test ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Most scenarios of integration testing involve having dependent services running. -This is where `Testcontainers `_ kicks in by +This is where `Testcontainers `__ kicks in by providing a JUnit interface to drive them before and after executing your tests. Test scenarios are endless. Some examples are migration scripts, persistance, @@ -502,7 +502,7 @@ Installation Testing - Run `vagrant up` on a server to test the installer: http://guides.dataverse.org/en/latest/developers/tools.html#vagrant . We haven't been able to get this working in Travis: https://travis-ci.org/IQSS/dataverse/builds/96292683 . Perhaps it would be possible to use AWS as a provider from Vagrant judging from https://circleci.com/gh/critical-alert/circleci-vagrant/6 . - Work with @lwo to automate testing of https://github.com/IQSS/dataverse-puppet . Consider using Travis: https://github.com/IQSS/dataverse-puppet/issues/10 -- Work with @donsizemore to automate testing of https://github.com/IQSS/dataverse-ansible with Travis or similar. +- Work with @donsizemore to automate testing of https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible with Travis or similar. 
Future Work on Load/Performance Testing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -514,7 +514,7 @@ Future Work on Load/Performance Testing Future Work on Accessibility Testing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- Using https://github.com/IQSS/dataverse-ansible and hooks available from accessibily testing tools, automate the running of accessibility tools on PRs so that developers will receive quicker feedback on proposed code changes that reduce the accessibility of the application. +- Using https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible and hooks available from accessibility testing tools, automate the running of accessibility tools on PRs so that developers will receive quicker feedback on proposed code changes that reduce the accessibility of the application. ---- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a026c4e19ff..25bcea41532 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -250,7 +250,7 @@ Dataverse can alternately store files in a Swift or S3-compatible object store, The following sections describe how to set up various types of stores and how to configure for multiple stores. Multi-store Basics -+++++++++++++++++ +++++++++++++++++++ To support multiple stores, Dataverse now requires an id, type, and label for each store (even for a single store configuration). These are configured by defining two required jvm options: @@ -259,16 +259,16 @@ To support multiple stores, Dataverse now requires an id, type, and label for ea ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..type=" ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files..label=