relative pkg imports (#4)
* pkg relative imports
* minor packages fix
* update init info
* update CI configs
* drop Shippable
* rename NB_THREADS -> NB_WORKERS
Borda authored Nov 16, 2019
1 parent 4074296 commit 68c7ffa
Showing 24 changed files with 135 additions and 204 deletions.
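Per the first bullet, the core change replaces absolute bpdl.* imports with package-relative ones throughout the bpdl package. A minimal sketch of the pattern, taken from the data_utils.py hunk shown below:

# before: absolute import of a sibling module inside the bpdl package
from bpdl.utilities import create_clean_folder
# after: relative import, resolved against the package itself
from .utilities import create_clean_folder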
74 changes: 0 additions & 74 deletions .shippable.yml
@@ -1,74 +0,0 @@
# vim ft=yaml

# After changing this file, check it on:
# http://yaml-online-parser.appspot.com/

build:
  # https://github.com/Shippable/support/issues/3873
  ci:
    - echo $CI_REPORTS && mkdir -p $CI_REPORTS
    - echo $COVERAGE_REPORTS && mkdir -p $COVERAGE_REPORTS

env:
  global:
    - CI_REPORTS=shippable/testresults
    - COVERAGE_REPORTS=shippable/codecoverage
    - DISPLAY=""

language: python

sudo: true

python:
  - 2.7
  - 3.5

cache: pip

before_install:
  - apt-get update --fix-missing
  - apt-get install freetype*
  - apt-get install python-dev python-tk
  - apt-get install python3-dev python3-tk
  - apt-get install pkg-config
  - pip install --upgrade pip
  - gcc --version ; python --version ; pip --version ; pwd ; ls -l

install:
  - pip install -r requirements.txt
  - pip install "nose>=1.3.7" coverage codecov "pytest>=3.0.5" flake8
  - pip list

script:
  - unset DISPLAY
  - mkdir results
  - python setup.py check -m -s
  # - nosetests --with-xunit --xunit-file=shippable/testresults/nosetests.xml
  - nosetests bpdl -v --exe --with-doctest --with-coverage --cover-package=bpdl
  - python experiments/run_dataset_generate.py --nb_samples 25 --nb_patterns 2 --image_size 64 64
  - python experiments/run_dataset_add_noise.py -p ./data_images
  - nosetests experiments -v --exe --with-doctest --with-xunit --xunit-file=shippable/testresults/nosetests.xml
  - flake8 . --ignore=E402,E731 --max-line-length=100

# - rm -rf results && mkdir results
# # EXPERIMENTS: pre-processing
# - python experiments/run_cut_minimal_images.py -i "./data_images/imaginal_discs/gene/*.png" -o ./data_images/imaginal_discs/gene_cut
# - python experiments/run_extract_fuzzy_activation.py -i "./data_images/ovary_stage-2/image/*.png" -o ./data_images/ovary_stage-2/gene
# - python experiments/run_extract_fuzzy_activation.py -i "./data_images/ovary_stage-3/image/*.png" -o ./data_images/ovary_stage-3/gene
#
# # EXPERIMENTS: core
# - python experiments/run_experiments.py --type synth -i ./data_images/syntheticDataset_vX -o ./results -c ./data_images/sample_config.yml
# - python experiments/run_experiments.py --type real -i ./data_images/imaginal_discs -o ./results --dataset gene_small --nb_workers 2
# - python experiments/run_reconstruction.py -e ./results/ExperimentBPDL_real_imaginal_discs_gene_small --visual
#
# # EXPERIMENTS: post-processing
# - python experiments/run_parse_experiments_result.py -i ./results -r results.csv
# - python experiments/run_recompute_experiments_result.py -i ./results
# - python experiments/run_parse_experiments_result.py -i ./results -r results_NEW.csv

after_success:
  - python setup.py install

  - codecov -t 9507eeee-6a1e-4313-87fa-f73064c539c9
  - coverage xml -o $COVERAGE_REPORTS/coverage.xml
  - coverage report
2 changes: 1 addition & 1 deletion .travis.yml
@@ -48,7 +48,7 @@ script:
- flake8 . --ignore=E402,E731 --max-line-length=100

after_success:
- python setup.py install
- python setup.py install --user
- codecov # public repository on Travis CI
- coverage xml
- python-codacy-coverage -r coverage.xml
2 changes: 1 addition & 1 deletion README.md
@@ -4,11 +4,11 @@
[![codecov](https://codecov.io/gh/Borda/pyBPDL/branch/master/graph/badge.svg?token=Bgklw7uaB0)](https://codecov.io/gh/Borda/pyBPDL)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/5f4c0dfac2b4444a935d587f663ac9c1)](https://www.codacy.com/app/Borda/pyBPDL?utm_source=github.com&utm_medium=referral&utm_content=Borda/pyBPDL&utm_campaign=Badge_Grade)
[![CircleCI](https://circleci.com/gh/Borda/pyBPDL.svg?style=svg&circle-token=0b3f34bedf54747d32becd2f13cd0da71fef7548)](https://circleci.com/gh/Borda/pyBPDL)
[![Run Status](https://api.shippable.com/projects/5937c15c3e246207003bc61b/badge?branch=master)](https://app.shippable.com/github/Borda/pyBPDL)
[![CodeFactor](https://www.codefactor.io/repository/github/borda/pybpdl/badge)](https://www.codefactor.io/repository/github/borda/pybpdl)
[![Documentation Status](https://readthedocs.org/projects/pybpdl/badge/?version=latest)](https://pybpdl.readthedocs.io/en/latest/?badge=latest)
[![Gitter](https://badges.gitter.im/pyBPDL/community.svg)](https://gitter.im/pyBPDL/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
<!--
[![Run Status](https://api.shippable.com/projects/5937c15c3e246207003bc61b/badge?branch=master)](https://app.shippable.com/github/Borda/pyBPDL)
[![Coverage Badge](https://api.shippable.com/projects/5937c15c3e246207003bc61b/coverageBadge?branch=master)](https://app.shippable.com/github/Borda/pyBPDL)
-->

9 changes: 5 additions & 4 deletions bpdl/__init__.py
@@ -24,8 +24,8 @@
    np.set_printoptions(legacy='1.13')

    # default display size was changed in pandas v0.23
    if 'display.max_columns' in pd.core.config._registered_options:
        pd.set_option('display.max_columns', 20)
    pd.set_option('display.max_columns', 20)

except ImportError:
    import traceback
    traceback.print_exc()
@@ -35,8 +35,8 @@
__author_email__ = 'jiri.borovec@fel.cvut.cz'
__license__ = 'BSD 3-clause'
__homepage__ = 'https://borda.github.io/pyBPDL'
__copyright__ = 'Copyright (c) 2014-2019, Jiri Borovec.'
__doc__ = """# BPDL - Binary pattern Dictionary Learning
__copyright__ = 'Copyright (c) 2014-2019, %s.' % __author__
__doc__ = 'BPDL - Binary pattern Dictionary Learning'
__long_doc__ = "# %s" % __doc__ + """
The package contain Binary pattern Dictionary Learning (BPDL) which is image processing tool
for unsupervised pattern extraction and atlas estimation. Moreover the project/repository
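The bpdl/__init__.py hunks make two small tweaks: the pandas display option is now set unconditionally (dropping the lookup in the private pd.core.config._registered_options registry), and the copyright and long description are composed from __author__ and __doc__ instead of being repeated. A sketch of how the new metadata strings come out, with the author value inferred from the old copyright line above:

__author__ = 'Jiri Borovec'
__doc__ = 'BPDL - Binary pattern Dictionary Learning'
__copyright__ = 'Copyright (c) 2014-2019, %s.' % __author__
__long_doc__ = "# %s" % __doc__ + """
The package contain Binary pattern Dictionary Learning (BPDL) ...
"""
# __copyright__ -> 'Copyright (c) 2014-2019, Jiri Borovec.'
# __long_doc__  -> Markdown text starting with '# BPDL - Binary pattern Dictionary Learning'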
10 changes: 5 additions & 5 deletions bpdl/data_utils.py
@@ -3,8 +3,8 @@
Copyright (C) 2015-2018 Jiri Borovec <jiri.borovec@fel.cvut.cz>
"""
from __future__ import absolute_import

# from __future__ import absolute_import
import os
import glob
import logging
@@ -31,9 +31,9 @@
from imsegm.utilities.experiments import WrapExecuteSequence
from imsegm.utilities.data_io import io_imread, io_imsave

from bpdl.utilities import create_clean_folder
from .utilities import create_clean_folder

NB_THREADS = mproc.cpu_count()
NB_WORKERS = mproc.cpu_count()
IMAGE_SIZE_2D = (128, 128)
IMAGE_SIZE_3D = (16, 128, 128)
NB_BIN_PATTERNS = 9
@@ -597,7 +597,7 @@ def generate_rand_patterns_occlusion(idx, im_ptns, out_dir=None,

def dataset_binary_combine_patterns(im_ptns, out_dir=None, nb_samples=NB_SAMPLES,
ptn_ration=RND_PATTERN_OCCLUSION,
nb_workers=NB_THREADS, rand_seed=None):
nb_workers=NB_WORKERS, rand_seed=None):
""" generate a Binary dataset composed from N samples and given ration
of pattern occlusion
@@ -815,7 +815,7 @@ def wrapper_image_function(i_img, func, coef, out_dir):
return i, img_def


def dataset_apply_image_function(imgs, out_dir, func, coef=0.5, nb_workers=NB_THREADS):
def dataset_apply_image_function(imgs, out_dir, func, coef=0.5, nb_workers=NB_WORKERS):
""" having list if input images create an dataset with randomly deform set
of these images and export them to the results folder
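The NB_THREADS -> NB_WORKERS rename in data_utils.py keeps the same default, all available CPUs, and the module-level constant still feeds the nb_workers keyword of the dataset helpers. A minimal sketch of that convention (run_in_parallel is a hypothetical stand-in for the dataset_* functions):

import multiprocessing as mproc

NB_WORKERS = mproc.cpu_count()  # module-level default, renamed from NB_THREADS

def run_in_parallel(items, nb_workers=NB_WORKERS):
    # hypothetical stand-in: callers may override the worker count per call
    print('processing %d items with %d workers' % (len(items), nb_workers))

run_in_parallel(range(10))                # defaults to all CPUs
run_in_parallel(range(10), nb_workers=2)  # or an explicit count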
14 changes: 7 additions & 7 deletions bpdl/dictionary_learning.py
@@ -4,8 +4,8 @@
Copyright (C) 2015-2018 Jiri Borovec <jiri.borovec@fel.cvut.cz>
"""
from __future__ import absolute_import

# from __future__ import absolute_import
import os
import time
import logging
@@ -25,14 +25,14 @@
# using https://github.com/Borda/pyGCO
from gco import cut_general_graph, cut_grid_graph_simple

from bpdl.pattern_atlas import (
from .pattern_atlas import (
compute_positive_cost_images_weights, edges_in_image2d_plane, init_atlas_mosaic,
atlas_split_indep_ptn, reinit_atlas_likely_patterns, compute_relative_penalty_images_weights)
from bpdl.pattern_weights import (weights_image_atlas_overlap_major,
weights_image_atlas_overlap_partial)
from bpdl.metric_similarity import compare_atlas_adjusted_rand
from bpdl.data_utils import export_image
from bpdl.registration import register_images_to_atlas_demons
from .pattern_weights import (
weights_image_atlas_overlap_major, weights_image_atlas_overlap_partial)
from .metric_similarity import compare_atlas_adjusted_rand
from .data_utils import export_image
from .registration import register_images_to_atlas_demons

NB_GRAPH_CUT_ITER = 5
TEMPLATE_NAME_ATLAS = 'BPDL_{}_{}_iter_{:04d}'
6 changes: 3 additions & 3 deletions bpdl/pattern_atlas.py
@@ -12,9 +12,9 @@
from skimage import morphology, measure, segmentation, filters
from scipy import ndimage as ndi

from bpdl.data_utils import image_deform_elastic, extract_image_largest_element
from bpdl.pattern_weights import (weights_label_atlas_overlap_threshold,
convert_weights_binary2indexes)
from .data_utils import image_deform_elastic, extract_image_largest_element
from .pattern_weights import (
weights_label_atlas_overlap_threshold, convert_weights_binary2indexes)

REINIT_PATTERN_COMPACT = True
UNARY_BACKGROUND = 1
10 changes: 5 additions & 5 deletions bpdl/registration.py
@@ -10,7 +10,7 @@

import time
import logging
import multiprocessing as mproc
# import multiprocessing as mproc
from functools import partial

import numpy as np
@@ -19,9 +19,9 @@
from dipy.align import VerbosityLevels
from dipy.align.imwarp import SymmetricDiffeomorphicRegistration, DiffeomorphicMap
from dipy.align.metrics import SSDMetric
from imsegm.utilities.experiments import WrapExecuteSequence
from imsegm.utilities.experiments import WrapExecuteSequence, nb_workers

NB_THREADS = int(mproc.cpu_count() * .8)
NB_WORKERS = nb_workers(0.8)

LIST_SDR_PARAMS = ('metric', 'level_iters', 'step_length', 'ss_sigma_factor',
'opt_tol', 'inv_iter', 'inv_tol', 'callback')
@@ -267,7 +267,7 @@ def wrapper_warp2d_transform_image(idx_img_deform, method='linear', inverse=Fals


def warp2d_images_deformations(list_images, list_deforms, method='linear',
inverse=False, nb_workers=NB_THREADS):
inverse=False, nb_workers=NB_WORKERS):
""" deform whole set of images to expected image domain
:param list(ndarray) list_images:
@@ -335,7 +335,7 @@ def wrapper_register_demons_image_weights(idx_img_weights, atlas, smooth_coef,
def register_images_to_atlas_demons(list_images, atlas, list_weights,
smooth_coef=1., params=None,
interp_method='linear', inverse=False,
rm_mean=True, nb_workers=NB_THREADS):
rm_mean=True, nb_workers=NB_WORKERS):
""" register whole set of images to estimated atlas and weights
IDEA: think about parallel registration per sets as for loading images
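In registration.py the worker count is no longer computed inline; it now comes from the nb_workers helper imported from imsegm.utilities.experiments. A sketch of what the fractional default presumably amounts to (assumed behaviour, not imsegm's actual implementation):

import multiprocessing as mproc

def nb_workers(ratio):
    # assumed behaviour: a fraction of the available CPUs, never less than one
    return max(1, int(mproc.cpu_count() * ratio))

NB_WORKERS = nb_workers(0.8)  # replaces the old NB_THREADS = int(mproc.cpu_count() * .8)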
6 changes: 2 additions & 4 deletions bpdl/utilities.py
@@ -11,15 +11,13 @@
import logging
import shutil
# import multiprocessing.pool
import multiprocessing as mproc
# import multiprocessing as mproc
# from functools import wraps

import numpy as np
from scipy import stats
from scipy.spatial import distance

NB_THREADS = mproc.cpu_count()


# def update_path(path_file, lim_depth=5, absolute=True):
# """ bubble in the folder tree up intil it found desired file
@@ -168,7 +166,7 @@ def generate_gauss_2d(mean, std, im_size=None, norm=None):
# return proc


# def wrap_execute_sequence(wrap_func, iterate_vals, nb_workers=NB_THREADS,
# def wrap_execute_sequence(wrap_func, iterate_vals, nb_workers=NB_WORKERS,
# desc='', ordered=False):
# """ wrapper for execution parallel of single thread as for...
#