Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Replace remote S3 paths with local paths for test_snake dryruns. #915

Merged
merged 3 commits into from
Mar 3, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
149 changes: 78 additions & 71 deletions test/unit/test_snake.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,28 +6,33 @@
import os
import subprocess
import shutil
import tempfile
import argparse
import itertools
import unittest

import pytest
import yaml

import util.cmd
import util.file
from test import TestCaseWithTmp


# Skip marker for tests that require snakemake; snakemake itself only
# supports Python >= 3.5 (matching the guarded import below).
needs_snakemake = pytest.mark.skipif(
    sys.version_info < (3, 5),
    reason='python version is too old for snakemake')


if sys.version_info >= (3, 5):
import snakemake


def add_to_sample_list(workdir, sample_list_name, sample_names):
    '''Append sample_names, one per line, to samples-<sample_list_name>.txt in workdir.'''
    list_path = os.path.join(workdir, 'samples-{}.txt'.format(sample_list_name))
    with open(list_path, 'a') as outf:
        outf.writelines(name + '\n' for name in sample_names)


def setup_dummy_simple(sample_names=('G1234', 'G5678', 'G3671.1_r1', 'G3680-1_4', '9876', 'x.y-7b')):
def setup_dummy_simple(workdir, sample_names=('G1234', 'G5678', 'G3671.1_r1', 'G3680-1_4', '9876', 'x.y-7b')):
''' Set up a very simple project directory with empty input files. '''

workdir = tempfile.mkdtemp()
os.mkdir(os.path.join(workdir, 'data'))
os.mkdir(os.path.join(workdir, 'ref_genome_dir'))
os.mkdir(os.path.join(workdir, 'data', '00_raw'))
Expand All @@ -45,73 +50,75 @@ def setup_dummy_simple(sample_names=('G1234', 'G5678', 'G3671.1_r1', 'G3680-1_4'
add_to_sample_list(workdir, name, sample_names)

shutil.copy(os.path.join(util.file.get_project_path(), 'pipes', 'Snakefile'), workdir)
shutil.copy(os.path.join(util.file.get_project_path(), 'pipes', 'config.yaml'), workdir)

os.symlink(util.file.get_project_path(), os.path.join(workdir, 'bin'))
with open(os.path.join(util.file.get_project_path(), 'pipes', 'config.yaml')) as f:
config = yaml.load(f)

def translate_remote_s3(uri):
remote_path = uri[5:]
fake_s3_root = os.path.join(util.file.get_project_path(), 'test', 'input', 's3')
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe all of the empty placeholder files should be made dynamically as part of the tests?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's hard to determine the required filenames dynamically. We would either need to store a list of filenames and create them at runtime, or keep the empty placeholder files checked in — I'm ambivalent between the two options.

local_path = os.path.join(fake_s3_root, remote_path)
return local_path

for k, v in config.items():
if isinstance(v, str):
if v.startswith('s3://'):
config[k] = translate_remote_s3(v)

if util.misc.is_nonstr_iterable(v):
for i, vv in enumerate(v):
if isinstance(vv, str):
if vv.startswith('s3://'):
v[i] = translate_remote_s3(vv)
with open(os.path.join(workdir, 'config.yaml'), 'w') as f:
yaml.dump(config, f)

os.symlink(util.file.get_project_path(), os.path.join(workdir, 'bin'))
return workdir


@unittest.skipIf(sys.version_info < (3, 5), "python version is too old for snakemake")
class TestSimpleDryRuns(TestCaseWithTmp):

def setUp(self):
super(TestSimpleDryRuns, self).setUp()
self.workdir = setup_dummy_simple()
self.env = {'GATK_PATH': os.environ.get('GATK_PATH'), 'NOVOALIGN_PATH': os.environ.get('NOVOALIGN_PATH')}

def tearDown(self):
for k, v in self.env.items():
if v:
os.environ[k] = v
super(TestSimpleDryRuns, self).tearDown()

def test_dryrun_all(self):
''' Test that the "all" rule dryruns properly '''
self.assertTrue(snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
#configfile=os.path.join(self.workdir, 'config.yaml'),
workdir=self.workdir,
dryrun=True))
self.assertTrue(snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
#configfile=os.path.join(self.workdir, 'config.yaml'),
workdir=self.workdir,
dryrun=True,
targets=['all']))

def test_dryrun_all_assemble(self):
''' Test that the "all_assemble" rule dryruns properly '''
self.assertTrue(snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
#configfile=os.path.join(self.workdir, 'config.yaml'),
workdir=self.workdir,
dryrun=True,
targets=['all_assemble']))

def test_dryrun_all_deplete(self):
''' Test that the "all_deplete" rule dryruns properly '''
self.assertTrue(snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
#configfile=os.path.join(self.workdir, 'config.yaml'),
workdir=self.workdir,
dryrun=True,
targets=['all_deplete']))

def test_dryrun_all_metagenomics(self):
''' Test that the "all_metagenomics" rule dryruns properly '''
self.assertTrue(snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
#configfile=os.path.join(self.workdir, 'config.yaml'),
workdir=self.workdir,
dryrun=True,
targets=['all_metagenomics']))

def test_missing_merge_inputs(self):
add_to_sample_list(self.workdir, 'assembly', 'G_missing')
res = snakemake.snakemake(
os.path.join(self.workdir, 'Snakefile'),
workdir=self.workdir,
dryrun=True,
targets=['all_assemble'])
assert res == False
@pytest.fixture
def workdir(request, tmpdir_function):
    '''Yield a dummy project directory, restoring tool-path env vars on teardown.'''
    saved_env = {}
    for var in ('GATK_PATH', 'NOVOALIGN_PATH'):
        saved_env[var] = os.environ.get(var)
    setup_dummy_simple(tmpdir_function)
    yield tmpdir_function
    # Teardown: put back any env vars that were set before the test ran.
    for var, value in saved_env.items():
        if value:
            os.environ[var] = value


def call_snakemake(workdir, targets=None):
    '''Dry-run the project Snakefile in workdir against the given targets; return its bool result.'''
    snakefile = os.path.join(workdir, 'Snakefile')
    config_path = os.path.join(workdir, 'config.yaml')
    return snakemake.snakemake(
        snakefile,
        configfile=config_path,
        workdir=workdir,
        dryrun=True,
        targets=targets)


@needs_snakemake
def test_dryrun_all(workdir):
    ''' Test that the "all" rule dryruns properly '''
    # Both the default target and the explicit 'all' target must dry-run cleanly.
    for targets in (None, ['all']):
        assert call_snakemake(workdir, targets)


@needs_snakemake
def test_dryrun_all_assemble(workdir):
    ''' Test that the "all_assemble" rule dryruns properly '''
    result = call_snakemake(workdir, ['all_assemble'])
    assert result

@needs_snakemake
def test_dryrun_all_deplete(workdir):
    ''' Test that the "all_deplete" rule dryruns properly '''
    result = call_snakemake(workdir, ['all_deplete'])
    assert result

@needs_snakemake
def test_dryrun_all_metagenomics(workdir):
    ''' Test that the "all_metagenomics" rule dryruns properly '''
    result = call_snakemake(workdir, ['all_metagenomics'])
    assert result

@needs_snakemake
def test_missing_merge_inputs(workdir):
    '''A sample listed for assembly with no input files should fail the dryrun.'''
    # Must pass a list: add_to_sample_list iterates its sample_names argument,
    # so a bare string would be split into one "sample" per character
    # ('G', '_', 'm', ...) instead of the single sample 'G_missing'.
    add_to_sample_list(workdir, 'assembly', ['G_missing'])
    assert not call_snakemake(workdir, ['all_assemble'])
29 changes: 16 additions & 13 deletions util/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def tmp_dir(*args, **kwargs):
_args = inspect.getcallargs(tempfile.mkdtemp, *args, **kwargs)
length_margin = 6
for pfx_sfx in ('prefix', 'suffix'):
if _args[pfx_sfx]:
if _args[pfx_sfx]:
_args[pfx_sfx] = string_to_file_name(_args[pfx_sfx], file_system_path=_args['dir'], length_margin=length_margin)
length_margin += len(_args[pfx_sfx].encode('utf-8'))

Expand Down Expand Up @@ -321,6 +321,10 @@ def mkdir_p(dirpath):
else:
raise

def touch_p(path, times=None):
    '''Touch file, making parent directories if they don't exist.

    Args:
        path: path of the file to create or update.
        times: optional times argument passed through to touch().
    '''
    dirpath = os.path.dirname(path)
    # A bare filename has no directory component; calling mkdir_p('') would
    # make os.makedirs raise (ENOENT, which mkdir_p re-raises), so skip it.
    if dirpath:
        mkdir_p(dirpath)
    touch(path, times=times)

def open_or_gzopen(fname, *opts, **kwargs):
mode = 'r'
Expand Down Expand Up @@ -384,7 +388,7 @@ def read_tabfile_dict(inFile, header_prefix="#", skip_prefix=None, rowcount_limi
row = row[:len(header)] + [item for item in row[len(header):] if len(item)]
assert len(header) == len(row), "%s != %s" % (len(header), len(row))
yield dict((k, v) for k, v in zip(header, row) if v)

if rowcount_limit and lines_read==rowcount_limit:
break

Expand Down Expand Up @@ -853,14 +857,14 @@ def uncompressed_file_type(fname):
base, ext = os.path.splitext(base)
return ext

def repack_tarballs(out_compressed_tarball,
input_compressed_tarballs,
extract_to_disk_path=None,
extract_numeric_owner=False,
avoid_disk_roundtrip=True,
ignore_zeros=True,
pipe_hint_in=None,
pipe_hint_out=None,
def repack_tarballs(out_compressed_tarball,
input_compressed_tarballs,
extract_to_disk_path=None,
extract_numeric_owner=False,
avoid_disk_roundtrip=True,
ignore_zeros=True,
pipe_hint_in=None,
pipe_hint_out=None,
threads=None):
threads = util.misc.sanitize_thread_count(threads)

Expand Down Expand Up @@ -908,7 +912,7 @@ def __del__(self):

def read(self, size):
    '''Read up to size bytes from the wrapped fileobj, mirroring them to written_mirror_file.'''
    # Callers must always pass an explicit size.
    assert size is not None
    chunk = self.fileobj.read(size)
    # Tee everything read into the mirror file before returning it.
    self.written_mirror_file.write(chunk)
    return chunk
Expand Down Expand Up @@ -970,7 +974,6 @@ def read(self, size):
out_compress_ps.wait()
if out_compress_ps.returncode != 0:
raise subprocess.CalledProcessError(out_compress_ps.returncode, "Call error %s" % out_compress_ps.returncode)

if outfile is not None:
outfile.close()