Bug Bash Patch (open-mmlab#94)
* transform wip

* mnist demo

* cv tutorial on cpu

* for demo

* wip

* kaggle patch

* handle trunked

* method

* python dependencies

* miscs

* add catboost

* rm ag.done
zhanghang1989 authored Dec 4, 2019
1 parent 1ad1c02 commit 49ef052
Showing 37 changed files with 773 additions and 437 deletions.
Jenkinsfile (7 changes: 4 additions & 3 deletions)
@@ -8,8 +8,8 @@ stage("Unit Test") {
VISIBLE_GPU=env.EXECUTOR_NUMBER.toInteger() % 8
sh """#!/bin/bash
set -ex
# remove and create new env instead
conda env update -n autogluon_py3 -f docs/build.yml
conda env remove -n autogluon_py3
conda env create -n autogluon_py3 -f docs/build.yml
conda activate autogluon_py3
conda list
export CUDA_VISIBLE_DEVICES=${VISIBLE_GPU}
@@ -35,7 +35,8 @@ stage("Build Docs") {
sh """#!/bin/bash
set -ex
export CUDA_VISIBLE_DEVICES=${VISIBLE_GPU}
conda env update -n autogluon_docs -f docs/build_contrib.yml
conda env remove -n autogluon_docs
conda env create -n autogluon_docs -f docs/build_contrib.yml
conda activate autogluon_docs
export PYTHONPATH=\${PWD}
env
autogluon/__init__.py (1 change: 1 addition & 0 deletions)
@@ -8,6 +8,7 @@
try_import_mxnet()

from . import scheduler, searcher, nas, utils
from .scheduler import get_cpu_count, get_gpu_count

from .utils import *
from .core import *
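With this re-export, the CPU/GPU counters become reachable straight from the package namespace. A minimal usage sketch, assuming an environment with this patched autogluon (and its MXNet dependency) installed:

import autogluon as ag

# Both helpers are forwarded from autogluon.scheduler.resource by the line added above.
print('CPUs available:', ag.get_cpu_count())
print('GPUs available:', ag.get_gpu_count())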
autogluon/core/decorator.py (8 changes: 4 additions & 4 deletions)
@@ -213,12 +213,12 @@ def registered_class(Cls):
class autogluonobject(AutoGluonObject):
@_autogluon_kwargs(**kwvars)
def __init__(self, *args, **kwargs):
self._args = args
self._kwargs = kwargs
self.args = args
self.kwargs = kwargs
self._inited = False

def sample(self, **config):
kwargs = copy.deepcopy(self._kwargs)
kwargs = copy.deepcopy(self.kwargs)
kwspaces = copy.deepcopy(autogluonobject.kwspaces)
for k, v in kwargs.items():
if k in kwspaces and isinstance(kwspaces[k], NestedSpace):
@@ -227,7 +227,7 @@ def sample(self, **config):
elif k in config:
kwargs[k] = config[k]

args = self._args
args = self.args
return Cls(*args, **kwargs)

def __repr__(self):
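For context on the rename from self._args/self._kwargs to the public self.args/self.kwargs: sample() deep-copies the stored kwargs, substitutes any sampled configuration values, and only then instantiates the wrapped class. A dependency-free sketch of that flow, using an illustrative LazyObject stand-in rather than the real decorator:

import copy

class LazyObject:
    """Toy stand-in for the decorated autogluonobject (illustrative only)."""
    def __init__(self, cls, *args, **kwargs):
        self.cls = cls
        self.args = args          # public, matching the renamed attributes above
        self.kwargs = kwargs

    def sample(self, **config):
        kwargs = copy.deepcopy(self.kwargs)   # never mutate the stored defaults
        for k in kwargs:
            if k in config:                   # overwrite searchable values with sampled ones
                kwargs[k] = config[k]
        return self.cls(*self.args, **kwargs)

lazy = LazyObject(dict, lr=0.1, wd=1e-4)
print(lazy.sample(lr=0.05))                   # {'lr': 0.05, 'wd': 0.0001}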
autogluon/core/loss.py (8 changes: 8 additions & 0 deletions)
@@ -0,0 +1,8 @@
from mxnet.gluon import loss
from ..core import obj

__all__ = ['SoftmaxCrossEntropyLoss']

@obj()
class SoftmaxCrossEntropyLoss(loss.SoftmaxCrossEntropyLoss):
pass
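A hedged usage sketch for the new module: wrapping the Gluon loss in @obj() turns it into a lazily constructed AutoGluon object, so the call below records the constructor arguments rather than building the loss immediately. The import path simply mirrors the new file location and is an assumption about how it is exposed.

from autogluon.core.loss import SoftmaxCrossEntropyLoss

# Standard mxnet.gluon.loss keyword arguments; the underlying Gluon loss is
# only materialized later (e.g. via .init(), as done for datasets elsewhere in this patch).
loss = SoftmaxCrossEntropyLoss(sparse_label=True)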
autogluon/core/optimizer.py (2 changes: 0 additions & 2 deletions)
@@ -1,6 +1,4 @@
import ConfigSpace as CS
from mxnet import optimizer as optim

from ..core import obj

__all__ = ['Adam', 'NAG', 'SGD']
autogluon/scheduler/__init__.py (1 change: 1 addition & 0 deletions)
@@ -1,4 +1,5 @@
from .import remote, resource
from .resource import get_cpu_count, get_gpu_count

# schedulers
from .scheduler import *
autogluon/scheduler/remote/remote.py (24 changes: 12 additions & 12 deletions)
@@ -24,8 +24,8 @@ def __init__(self, remote_ip=None, port=None, local=False, ssh_username=None,
if not local:
remote_addr = (remote_ip + ':{}'.format(port))
self.service = DaskRemoteService(remote_ip, port, ssh_username,
ssh_port, ssh_private_key, remote_python,
remote_dask_worker)
ssh_port, ssh_private_key, remote_python,
remote_dask_worker)
super(Remote, self).__init__(remote_addr)
else:
super(Remote, self).__init__(processes=False)
@@ -85,15 +85,15 @@ def __init__(self, remote_addr, scheduler_port, ssh_username=None,
)
# Start worker nodes
self.worker = start_worker(
self.scheduler_addr,
self.scheduler_port,
remote_addr,
self.ssh_username,
self.ssh_port,
self.ssh_private_key,
self.remote_python,
self.remote_dask_worker,
)
self.scheduler_addr,
self.scheduler_port,
remote_addr,
self.ssh_username,
self.ssh_port,
self.ssh_private_key,
self.remote_python,
self.remote_dask_worker,
)
self.start_monitoring()

def start_monitoring(self):
@@ -118,7 +118,7 @@ def monitor_remote_processes(self):
time.sleep(0.1)

except KeyboardInterrupt:
self.shutdown()
pass

def shutdown(self):
all_processes = [self.worker, self.scheduler]
autogluon/task/image_classification/classifier.py (70 changes: 49 additions & 21 deletions)
@@ -1,14 +1,22 @@
import os
import math
import pickle
import numpy as np
from PIL import Image

import mxnet as mx
import matplotlib.pyplot as plt
from mxnet.gluon.data.vision import transforms

from ...core import AutoGluonObject
from .utils import *
from .metrics import get_metric_instance
from ..base.base_predictor import BasePredictor
from ...utils import save, load, tqdm
from ...utils.pil_transforms import *

__all__ = ['Classifier']

class Classifier(BasePredictor):
"""
Classifier returned by task.fit()
@@ -54,35 +62,55 @@ def save(self, checkpoint):

def predict(self, X, input_size=224, plot=True):
""" This method should be able to produce predictions regardless if:
X = single data example (e.g. single image, single document),
X = batch of many examples, X = task.Dataset object
X = single data example (e.g. single image),
X = task.Dataset object
"""
"""The task predict function given an input.
Args:
img: the input
Example:
>>> ind, prob = classifier.predict('example.jpg')
Parameters
----------
X : str or :func:`autogluon.task.ImageClassification.Dataset`
path to the input image or dataset
Example:
>>> ind, prob = classifier.predict('example.jpg')
"""
# load and display the image
img = mx.image.imread(X) if isinstance(X, str) and os.path.isfile(X) else X
if plot:
plt.imshow(img.asnumpy())
plt.show()
# model inference
input_size = self.model.input_size if hasattr(self.model, 'input_size') else input_size
resize = int(math.ceil(input_size / 0.875))
transform_fn = transforms.Compose([
transforms.Resize(resize),
transforms.CenterCrop(input_size),
transforms.ToTensor(),
transform_fn = Compose([
Resize(resize),
CenterCrop(input_size),
ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
img = transform_fn(img)
proba = self.predict_proba(img)
ind = mx.nd.argmax(proba, axis=1).astype('int')
idx = mx.nd.stack(mx.nd.arange(proba.shape[0], ctx=proba.context),
ind.astype('float32'))
return ind, mx.nd.gather_nd(proba, idx)
def predict_img(img):
# load and display the image
proba = self.predict_proba(img)
ind = mx.nd.argmax(proba, axis=1).astype('int')
idx = mx.nd.stack(mx.nd.arange(proba.shape[0], ctx=proba.context),
ind.astype('float32'))
probai = mx.nd.gather_nd(proba, idx)
return ind, probai
if isinstance(X, str) and os.path.isfile(X):
img = self.loader(X)
if plot:
plt.imshow(np.array(img))
plt.show()
img = transform_fn(img)
return predict_img(img)
if isinstance(X, AutoGluonObject):
X = X.init()
inds, probas = [], []
for x in X:
ind, proba = predict_img(x)
inds.append(ind)
probas.append(proba)
return inds, probas

@staticmethod
def loader(path):
with open(path, 'rb') as f:
img = Image.open(f)
return img.convert('RGB')

def predict_proba(self, X):
""" Produces predicted class probabilities if we are dealing with a classification task.
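The reworked predict() routes a single image file and an AutoGluonObject dataset through the same predict_img helper. The argmax/gather_nd pairing in that helper reduces to the NumPy equivalent below, included only to make the indexing explicit; it is illustrative and not part of AutoGluon:

import numpy as np

def top_class(proba):
    """Return (predicted class index, its probability) per row, as predict_img does."""
    proba = np.asarray(proba)
    ind = proba.argmax(axis=1)                   # mirrors mx.nd.argmax(proba, axis=1)
    prob = proba[np.arange(len(proba)), ind]     # mirrors mx.nd.gather_nd on stacked indices
    return ind, prob

ind, prob = top_class([[0.1, 0.7, 0.2],
                       [0.6, 0.3, 0.1]])
# ind -> [1 0], prob -> [0.7 0.6]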
(Diffs for the remaining changed files are not shown here.)
