
Commit

Merge pull request #511 from ahnitz/multicache
use a per process weave cache directory
duncan-brown committed Oct 19, 2015
2 parents edecb64 + 6c99007 commit 4068046
Showing 2 changed files with 10 additions and 1 deletion.
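
Background for readers outside the project: PyCBC at this time used scipy.weave to compile inline C code at run time, and weave locates its compiled-code catalog through the PYTHONCOMPILED environment variable. When several multiprocessing workers share one catalog directory, concurrent compilations can collide, so this commit points each worker process at its own subdirectory. A minimal sketch of the mechanism, with an invented path (illustration only, not PyCBC code):

    import os

    # Export a private catalog directory before any scipy.weave compilation
    # happens in this process.  The path below is made up; the commit derives
    # it from the process identity instead (see pycbc/__init__.py below).
    os.environ['PYTHONCOMPILED'] = '/tmp/weave-cache/worker-1234'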
bin/hdfcoinc/pycbc_calculate_psd (4 changes: 3 additions, 1 deletion)
@@ -29,7 +29,7 @@ pycbc.init_logging(args.verbose)

pycbc.psd.verify_psd_options(args, parser)
pycbc.strain.StrainSegments.verify_segment_options(args, parser)

def grouper(n, iterable):
    args = [iter(iterable)] * n
    return list([e for e in t if e != None] for t in itertools.izip_longest(*args))
@@ -38,6 +38,8 @@ def get_psd((seg, i)):
""" Get the PSDs for the given data chunck. This follows the same rules
as pycbc_inspiral for determining where to calculate PSDs
"""
    pycbc.multiprocess_cache_dir()

    logging.info('%d: getting strain for %.1f-%.1f (%.1f s)', i, seg[0],
                 seg[1], abs(seg))
    args.gps_start_time = int(seg[0]) + args.pad_data
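
The hunk above adds a call to pycbc.multiprocess_cache_dir() at the top of the get_psd worker, so each worker process switches to a private weave cache before it touches any weave-accelerated code. A hedged sketch of how such a worker is typically driven; the pool setup, segment list, and worker body below are assumptions for illustration, not part of this diff:

    import multiprocessing
    import pycbc

    def psd_worker(task):
        """Illustrative stand-in for get_psd; only the cache call is real."""
        seg, i = task
        pycbc.multiprocess_cache_dir()   # private weave cache for this worker process
        # ... read strain for `seg` and estimate the PSD here ...
        return i

    if __name__ == '__main__':
        segments = [((0.0, 256.0), 0), ((256.0, 512.0), 1)]   # made-up (segment, index) pairs
        pool = multiprocessing.Pool(processes=2)
        print(pool.map(psd_worker, segments))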
pycbc/__init__.py (7 changes: 7 additions, 0 deletions)
@@ -125,3 +125,10 @@ def sig_handler(signum, frame):
except ImportError as e:
    print e
    HAVE_MKL=False

def multiprocess_cache_dir():
    import multiprocessing
    cache_dir = os.path.join(_cache_dir_path, str(id(multiprocessing.current_process())))
    os.environ['PYTHONCOMPILED'] = cache_dir
    try: os.makedirs(cache_dir)
    except OSError: pass
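
The cache name is keyed on id(multiprocessing.current_process()): each pool worker sees a different Process object, so in practice every worker writes under its own subdirectory of the module-level _cache_dir_path, created lazily with an already-existing directory tolerated. A quick illustrative check from the parent process, assuming this version of pycbc is importable:

    import os
    import pycbc

    pycbc.multiprocess_cache_dir()
    print(os.environ['PYTHONCOMPILED'])
    # Prints something like <cache dir>/140234... ; inside a Pool worker the
    # current Process object, and therefore the numeric suffix, differs.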
