Skip to content

Commit

Permalink
Merge branch 'galaxyproject:main' into location_test
Browse files Browse the repository at this point in the history
  • Loading branch information
pauldg authored Feb 5, 2024
2 parents f168c74 + 97405a5 commit 99a3be7
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 1 deletion.
20 changes: 20 additions & 0 deletions tests/test_helpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
"""Unit tests module for the helper functions"""
import unittest
from tpv.commands.test import mock_galaxy
from tpv.core.helpers import get_dataset_attributes


class TestHelpers(unittest.TestCase):
    """Tests for helper functions"""

    def test_get_dataset_attributes(self):
        """Test that the function returns a dictionary with the correct attributes"""
        seven_gib = 7 * 1024 ** 3
        # Build a minimal mock job with a single 7 GiB input dataset stored
        # in object store "files1".
        dataset = mock_galaxy.Dataset(
            "test.txt", file_size=seven_gib, object_store_id="files1"
        )
        job = mock_galaxy.Job()
        job.add_input_dataset(mock_galaxy.DatasetAssociation("test", dataset))
        # The helper should key on the dataset id and report store + size.
        expected = {0: {'object_store_id': 'files1', 'size': seven_gib}}
        self.assertEqual(get_dataset_attributes(job.input_datasets), expected)
3 changes: 2 additions & 1 deletion tpv/commands/test/mock_galaxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,12 @@ def __init__(self, name, dataset):
class Dataset:
    """Mock of a Galaxy dataset, exposing only the fields TPV reads."""

    # Class-level counter used to hand out sequential dataset ids.
    counter = 0

    def __init__(self, file_name, file_size, object_store_id=None):
        """Create a mock dataset.

        :param file_name: name of the dataset file
        :param file_size: size of the dataset in bytes
        :param object_store_id: id of the object store holding the dataset,
            or None when not assigned to a store
        """
        self.id = Dataset.counter
        # Increment the class attribute so the next Dataset gets a fresh id.
        # (`self.counter += 1` would only create an instance attribute that
        # shadows the class counter, leaving every dataset with id 0.)
        Dataset.counter += 1
        self.file_name = file_name
        self.file_size = file_size
        self.object_store_id = object_store_id

    def get_size(self, calculate_size=False):
        # calculate_size is accepted for interface compatibility with
        # Galaxy's real Dataset.get_size and ignored by the mock.
        return self.file_size
Expand Down
11 changes: 11 additions & 0 deletions tpv/core/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,3 +114,14 @@ def tool_version_gte(tool, version):

def tool_version_gt(tool, version):
return parse_version(tool.version) > parse_version(version)


def get_dataset_attributes(datasets):
    """Return a dict mapping each input dataset's id to its attributes.

    Each entry maps the underlying dataset id to a dict with that dataset's
    'object_store_id' and its 'size' in bytes. Accepts None (treated as no
    datasets).
    """
    attributes = {}
    for association in datasets or []:
        # Unwrap the job-input association down to the raw dataset record.
        dataset = association.dataset.dataset
        attributes[dataset.id] = {
            'object_store_id': dataset.object_store_id,
            'size': get_dataset_size(dataset),
        }
    return attributes

0 comments on commit 99a3be7

Please sign in to comment.