Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

[Code merge] Merge code from dogfood-v1 branch #4

Merged
merged 1 commit into from
Aug 24, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 24 additions & 21 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
BIN_PATH ?= $(HOME)/.nni/bin/
NNI_PATH ?= $(HOME)/.nni/
BIN_PATH ?= /usr/bin
NODE_PATH ?= /usr/share
EXAMPLE_PATH ?= /usr/share/nni/examples

SRC_DIR := ${PWD}

Expand All @@ -20,48 +21,50 @@ build:


install:
mkdir -p $(NNI_PATH)
mkdir -p $(BIN_PATH)
mkdir -p $(NODE_PATH)/nni
mkdir -p $(EXAMPLE_PATH)

### Installing NNI Manager ###
cp -rT src/nni_manager/dist $(NNI_PATH)nni_manager
cp -rT src/nni_manager/node_modules $(NNI_PATH)nni_manager/node_modules
cp -rT src/nni_manager/dist $(NODE_PATH)/nni/nni_manager
cp -rT src/nni_manager/node_modules $(NODE_PATH)/nni/nni_manager/node_modules

### Installing Web UI ###
cp -rT src/webui/build $(NNI_PATH)webui
ln -sf $(NNI_PATH)nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)serve
cp -rT src/webui/build $(NODE_PATH)/nni/webui
ln -sf $(NODE_PATH)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve

### Installing Python SDK dependencies ###
pip3 install -r src/sdk/pynni/requirements.txt
### Installing Python SDK ###
cd src/sdk/pynni && pip3 install -e .
cd src/sdk/pynni && python3 setup.py install

### Installing nnictl ###
cd tools && pip3 install -e .
cd tools && python3 setup.py install

echo '#!/bin/sh' > $(BIN_PATH)nnimanager
echo 'cd $(NNI_PATH)nni_manager && node main.js $$@' >> $(BIN_PATH)nnimanager
chmod +x $(BIN_PATH)nnimanager
echo '#!/bin/sh' > $(BIN_PATH)/nnimanager
echo 'cd $(NODE_PATH)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
chmod +x $(BIN_PATH)/nnimanager

install -m 755 tools/nnictl $(BIN_PATH)nnictl
install -m 755 tools/nnictl $(BIN_PATH)/nnictl

### Installing examples ###
cp -rT examples $(NNI_PATH)examples
cp -rT examples $(EXAMPLE_PATH)


dev-install:
### Installing Python SDK dependencies ###
pip3 install -r src/sdk/pynni/requirements.txt
pip3 install --user -r src/sdk/pynni/requirements.txt
### Installing Python SDK ###
cd src/sdk/pynni && pip3 install -e .
cd src/sdk/pynni && pip3 install --user -e .

### Installing nnictl ###
cd tools && pip3 install -e .
cd tools && pip3 install --user -e .


uninstall:
-rm -r $(NNI_PATH)
-rm -r $(BIN_PATH)
-rm -r $(EXAMPLE_PATH)
-rm -r $(NODE_PATH)/nni
-pip3 uninstall -y nnictl
-pip3 uninstall -y nni

-rm $(BIN_PATH)/nnictl
-rm $(BIN_PATH)/nnimanager
-rm $(BIN_PATH)/serve
14 changes: 5 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,25 +1,21 @@
# Neural Network Intelligence

[![Build Status](https://travis-ci.org/Microsoft/NeuralNetworkIntelligence.svg?branch=master)](https://travis-ci.org/Microsoft/NeuralNetworkIntelligence)

## Introduction
# Introduction
Neural Network Intelligence (NNI) is a lightweight package for supporting hyper-parameter tuning or neural architecture search.
It could easily run in different environments, such as: local/remote machine/cloud.
And it offers a new annotation language for user to conveniently design search space.
Also user could write code using any language or any machine learning framework.

## Getting Started
# Getting Started
TODO: Guide users through getting your code up and running on their own system. In this section you can talk about:
1. Installation process
2. Software dependencies
3. Latest releases
4. API references

## Build and Test
# Build and Test
TODO: Describe and show how to build your code and run the tests.

## Contribute
# Contribute
TODO: Explain how other users and developers can contribute to make your code better.

## Privacy Statement
# Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
4 changes: 2 additions & 2 deletions docs/EnableAssessor.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ assessor:
optimizationMode: Maximize
trial:
trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
```
For our built-in assessors, you need to fill two fields: `assessorName` which chooses NNI provided assessors (refer to [here]() for built-in assessors), `optimizationMode` which includes Maximize and Minimize (you want to maximize or minimize your trial result).
Expand Down Expand Up @@ -54,7 +54,7 @@ assessor:
assessorGpuNum: 0
trial:
trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
```
You only need to fill in three fields: `assessorCommand`, `assessorCodeDir` and `assessorGpuNum`.
51 changes: 23 additions & 28 deletions docs/GetStarted.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,39 +25,34 @@ The tool dispatches and runs trial jobs that are generated by tuning algorithms to s
* As a ML platform owner, you want to support AutoML in your platform

## **Setup**
* install using deb file

TBD

* install from source code

* __Dependencies__
nni requires:
```
### Prepare Node.js 10.8.0 or above
wget https://nodejs.org/dist/v10.8.0/node-v10.8.0-linux-x64.tar.xz
tar xf node-v10.8.0-linux-x64.tar.xz
mv node-v10.8.0-linux-x64/* /usr/local/node/
### Prepare Yarn 1.6.0 or above
wget https://github.com/yarnpkg/yarn/releases/download/v1.6.0/yarn-v1.6.0.tar.gz
tar xf yarn-v1.6.0.tar.gz
mv yarn-v1.6.0/* /usr/local/yarn/
### Add Node.js and Yarn in PATH
export PATH=/usr/local/node/bin:/usr/local/yarn/bin:$PATH
### clone nni source code
git clone ...
### build and install nni
make build
sudo make install
python >= 3.5
node >= 10.9.0
yarn >= 1.9.4
```
Before installing nni, please make sure you have set up your Python environment correctly.
* __User installation__

* clone nni repository

git clone https://github.com/Microsoft/NeuralNetworkIntelligence

* run install.sh

cd NeuralNetworkIntelligence
sh ./install.sh

This documentation assumes you have setup one or more [training services]().
For more details about installation, please refer to [Installation instructions](Installation.md).

## **Quick start: run an experiment at local**
Requirements:
* local environment setup [TODO]

Run the following command to create an experiment for [mnist]
```bash
nnictl create --config $HOME/.nni/examples/trials/mnist-annotation/config.yaml
nnictl create --config /usr/share/nni/examples/trials/mnist-annotation/config.yml
```
This command will start the experiment and WebUI. The WebUI endpoint will be shown in the output of this command (for example, `http://localhost:8080`). Open this URL using your browsers. You can analyze your experiment through WebUI, or open trials' tensorboard.

Expand All @@ -69,9 +64,9 @@ An experiment is to run multiple trial jobs, each trial job tries a configuratio
* Provide a yaml experiment configure file
* (optional) Provide or choose an assessor

**Prepare trial**: Let's use a simple trial example, e.g. mnist, provided by NNI. After you installed NNI, NNI examples have been put in $HOME/.nni/examples, run `ls $HOME/.nni/examples/trials` to see all the trial examples. You can simply execute the following command to run the NNI mnist example:
**Prepare trial**: Let's use a simple trial example, e.g. mnist, provided by NNI. After you installed NNI, NNI examples have been put in /usr/share/nni/examples, run `ls /usr/share/nni/examples/trials` to see all the trial examples. You can simply execute the following command to run the NNI mnist example:

python $HOME/.nni/examples/trials/mnist-annotation/mnist.py
python /usr/share/nni/examples/trials/mnist-annotation/mnist.py

This command will be filled in the yaml configure file below. Please refer to [here]() for how to write your own trial.

Expand All @@ -82,7 +77,7 @@ This command will be filled in the yaml configure file below. Please refer to [h

*tunerName* is used to specify a tuner in NNI, *optimizationMode* is to indicate whether you want to maximize or minimize your trial's result.

**Prepare configure file**: Since you have already known which trial code you are going to run and which tuner you are going to use, it is time to prepare the yaml configure file. NNI provides a demo configure file for each trial example, `cat $HOME/.nni/examples/trials/mnist-annotation/config.yaml` to see it. Its content is basically shown below:
**Prepare configure file**: Since you have already known which trial code you are going to run and which tuner you are going to use, it is time to prepare the yaml configure file. NNI provides a demo configure file for each trial example, `cat /usr/share/nni/examples/trials/mnist-annotation/config.yml` to see it. Its content is basically shown below:

```
authorName: your_name
Expand All @@ -102,15 +97,15 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
```

Here *useAnnotation* is true because this trial example uses our python annotation (refer to [here]() for details). For trial, we should provide *trialCommand* which is the command to run the trial, provide *trialCodeDir* where the trial code is. The command will be executed in this directory. We should also provide how many GPUs a trial requires.

With all these steps done, we can run the experiment with the following command:

nnictl create --config $HOME/.nni/examples/trials/mnist-annotation/config.yaml
nnictl create --config /usr/share/nni/examples/trials/mnist-annotation/config.yml

You can refer to [here](NNICTLDOC.md) for more usage guide of *nnictl* command line tool.

Expand Down
28 changes: 28 additions & 0 deletions docs/Installation.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
Installation instructions
===

## install using deb file

TBD

## install from source code
* Prepare Node.js 10.9.0 or above

wget https://nodejs.org/dist/v10.9.0/node-v10.9.0-linux-x64.tar.xz
tar xf node-v10.9.0-linux-x64.tar.xz
mv node-v10.9.0-linux-x64/* /usr/local/node/
* Prepare Yarn 1.9.4 or above

wget https://github.com/yarnpkg/yarn/releases/download/v1.9.4/yarn-v1.9.4.tar.gz
tar xf yarn-v1.9.4.tar.gz
mv yarn-v1.9.4/* /usr/local/yarn/
* Add Node.js and Yarn in PATH

export PATH=/usr/local/node/bin:/usr/local/yarn/bin:$PATH
* clone nni source code

git clone https://github.com/Microsoft/NeuralNetworkIntelligence
* build and install nni

make build
sudo make install
8 changes: 4 additions & 4 deletions docs/NNICTLDOC.md
Original file line number Diff line number Diff line change
Expand Up @@ -182,24 +182,24 @@ nnictl log

### Manage experiment information

* __nnictl experiment ls__
* __nnictl experiment show__
* Description

Show the information of experiment.
* Usage

nnictl experiment ls
nnictl experiment show



* __nnictl config ls__
* __nnictl config show__
* Description

Display the current context information.

* Usage

nnictl config ls
nnictl config show

### Manage restful server
* __nnictl rest check__
Expand Down
2 changes: 1 addition & 1 deletion docs/RemoteMachineMode.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
#machineList can be empty if the platform is local
machineList:
Expand Down
4 changes: 2 additions & 2 deletions examples/trials/cifar10/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/cifar10/search_space.json
searchSpacePath: /usr/share/nni/examples/trials/cifar10/search_space.json
#choice: true, false
useAnnotation: false
tuner:
Expand All @@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python3 cifar10.py
trialCodeDir: $HOME/.nni/examples/trials/cifar10
trialCodeDir: /usr/share/nni/examples/trials/cifar10
trialGpuNum: 0
24 changes: 24 additions & 0 deletions examples/trials/cifar10/config_assessor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
authorName: default
experimentName: example_cifar10
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: /usr/share/nni/examples/trials/cifar10/search_space.json
#choice: true, false
useAnnotation: false
tuner:
#choice: TPE, Random, Anneal, Evolution
tunerName: TPE
#choice: Maximize, Minimize
optimizationMode: Maximize
assessor:
#choice: Medianstop
assessorName: Medianstop
#choice: Maximize, Minimize
optimizationMode: Maximize
trial:
trialCommand: python3 cifar10.py
trialCodeDir: /usr/share/nni/examples/trials/cifar10
trialGpuNum: 0
4 changes: 2 additions & 2 deletions examples/trials/ga_squad/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ trainingServicePlatform: local
useAnnotation: false
tuner:
tunerCommand: python3 __main__.py
tunerCwd: $HOME/.nni/examples/tuners/ga_customer_tuner
tunerCwd: /usr/share/nni/examples/tuners/ga_customer_tuner
trial:
trialCommand: python3 trial.py
trialCodeDir: $HOME/.nni/examples/trials/ga_squad
trialCodeDir: /usr/share/nni/examples/trials/ga_squad
trialGpuNum: 0
2 changes: 1 addition & 1 deletion examples/trials/mnist-annotation/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,5 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
4 changes: 2 additions & 2 deletions examples/trials/mnist-keras/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/mnist-keras/search_space.json
searchSpacePath: /usr/share/nni/examples/trials/mnist-keras/search_space.json
#choice: true, false
useAnnotation: false
tuner:
Expand All @@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python3 mnist-keras.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-keras
trialCodeDir: /usr/share/nni/examples/trials/mnist-keras
trialGpuNum: 0
2 changes: 1 addition & 1 deletion examples/trials/mnist-smartparam/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,5 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-smartparam
trialCodeDir: /usr/share/nni/examples/trials/mnist-smartparam
trialGpuNum: 0
4 changes: 2 additions & 2 deletions examples/trials/mnist/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/mnist/search_space.json
searchSpacePath: /usr/share/nni/examples/trials/mnist/search_space.json
#choice: true, false
useAnnotation: false
tuner:
Expand All @@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize
trial:
trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist
trialCodeDir: /usr/share/nni/examples/trials/mnist
trialGpuNum: 0
Loading