
Commit

fix bugs in v2.0
JiahangXu committed Jun 27, 2022
1 parent 0b5e0ce commit e66d7ef
Showing 7 changed files with 41 additions and 27 deletions.
20 changes: 6 additions & 14 deletions docs/builder/build_kernel_latency_predictor.md
@@ -140,7 +140,7 @@ workspace = "/path/to/workspace/"
builder_config.init(workspace)

kernel_type = "conv-bn-relu"
mark = "prior"
mark = "test"
backend = "tflite_cpu"
error_threshold = 0.1

@@ -151,10 +151,10 @@ kernel_data = (cfgs_path, lats_path)

# build latency predictor
predictor, acc10, error_configs = build_predictor_by_data(
kernel_type, kernel_data, backend, error_threshold=error_threshold, mark="prior",
kernel_type, kernel_data, backend, error_threshold=error_threshold, mark=mark,
save_path=os.path.join(workspace, "predictor_build", "results")
)
print(f'Iteration 0: acc10 {acc10}, error_configs number: {len(error_configs)}')
print(f'Iteration 0: acc10 {acc10}, error_configs number: {len(error_configs)}')
```

In the implementation, the `kernel_data` tuple consists of `cfgs_path` and `lats_path`, which hold the config information and latency information respectively. Both `cfgs_path` and `lats_path` accept either a JSON file path string or a dictionary of models. In addition, if the config information and latency information live in the same data holder, users can directly specify `kernel_data = cfgs_path`.
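For illustration, here is a minimal sketch of the two accepted forms; the file names below reuse the result paths mentioned later in this document and are placeholders rather than required names:

```python
kernel_type = "conv-bn-relu"

# form 1: a tuple of separate config and latency sources, given here as JSON file paths
cfgs_path = f"{workspace}/predictor_build/results/{kernel_type}_prior.json"
lats_path = f"{workspace}/predictor_build/results/profiled_{kernel_type}.json"
kernel_data = (cfgs_path, lats_path)

# form 2: config and latency information share one data holder
kernel_data = cfgs_path
```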
@@ -174,7 +174,6 @@ from nn_meter.builder import build_initial_predictor_by_data
workspace = "/path/to/workspace/"
builder_config.init(workspace)

backend = connect_backend(backend_name="tflite_cpu")
kernel_type = "conv-bn-relu"
backend = "tflite_cpu"
error_threshold = 0.1
@@ -206,10 +205,9 @@ Here is an example for adaptive data sampling:
from nn_meter.builder import builder_config
from nn_meter.builder.backends import connect_backend
from nn_meter.builder import build_adaptive_predictor_by_data
workspace = "/data1/jiahang/working/release2test/"
workspace = "/path/to/workspace/"
builder_config.init(workspace)

backend = connect_backend(backend_name="tflite_cpu")
kernel_type = "conv-bn-relu"
backend = "tflite_cpu"
error_threshold = 0.1
@@ -221,14 +219,8 @@ predictor, data = build_adaptive_predictor_by_data(
In the method `build_adaptive_predictor_by_data`, the parameter `kernel_data` holds all training and testing data for the current predictor training. The value of `kernel_data` can either be a Dict instance generated by `build_initial_predictor_by_data` or `build_adaptive_predictor_by_data`, or a Tuple instance such as:

```python
config_json_file = [
f'{workspace}/predictor_build/results/{kernel_type}_prior.json',
f'{workspace}/predictor_build/results/{kernel_type}_finegrained1.json',
f'{workspace}/predictor_build/results/{kernel_type}_finegrained2.json'
]
latency_json_file = [
f'{workspace}/predictor_build/results/profiled_{kernel_type}.json'
]
config_json_file = [f'{workspace}/predictor_build/results/{kernel_type}_prior.json'] # Add all needed JSON file names to the list
latency_json_file = [f'{workspace}/predictor_build/results/profiled_{kernel_type}.json'] # Add all needed JSON file names to the list
kernel_data = (config_json_file, latency_json_file)
```
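As a rough usage sketch (assuming `kernel_type`, `backend`, and `error_threshold` are defined as in the earlier snippets), the tuple can then be passed in place of the Dict:

```python
predictor, data = build_adaptive_predictor_by_data(
    kernel_type, kernel_data, backend,
    finegrained_sample_num=20, error_threshold=error_threshold
)
```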

3 changes: 2 additions & 1 deletion docs/requirements/requirements_builder.txt
@@ -1,4 +1,5 @@
tensorflow
tensorflow==2.7.0
# torch==1.10.0
serial
pure-python-adb
typing
@@ -28,7 +28,7 @@
"source": [
"# initialize builder config with workspace\n",
"from nn_meter.builder import builder_config\n",
"workspace = \"/data1/jiahang/working/release2test/\"\n",
"workspace = \"/path/to/workspace/\"\n",
"builder_config.init(workspace) "
]
},
2 changes: 1 addition & 1 deletion nn_meter/builder/backends/interface.py
@@ -83,7 +83,7 @@ def convert_model(self, model_path, save_path, input_shape=None):
generated and used
"""
# convert model and save the converted model to path `converted_model`
converted_model = ...
converted_model = model_path
return converted_model

def profile(self, converted_model, metrics = ['latency'], input_shape = None, **kwargs):
@@ -53,22 +53,40 @@ def get_flops_params(kernel_type, config):
def collect_kernel_data(kernel_data, predict_label = 'latency'):
if isinstance(kernel_data, dict):
return kernel_data

config, label = kernel_data
if isinstance(config, list):
config = collect_data(config)
else:
with open(config, 'r') as fp:
config = json.load(fp)

if isinstance(label, list):
label = collect_data(label)
if predict_label == 'latency':
from nn_meter.builder.backend_meta.utils import read_profiled_results
label = read_profiled_results(label)
return (config, label)
else:
with open(label, 'r') as fp:
label = json.load(fp)
if predict_label == 'latency':
from nn_meter.builder.backend_meta.utils import read_profiled_results
label = read_profiled_results(label)

for modules in config.keys():
for model_id in config[modules].keys():
try:
config[modules][model_id][predict_label] = label[modules][model_id][predict_label]
except:
pass

return config


def collect_data(file_list):
file_list_copy = file_list[:]

from ...utils import merge_info
data = file_list.pop(0)
data = file_list_copy.pop(0)
with open(data, 'r') as fp:
data = json.load(fp)
for file in file_list:
for file in file_list_copy:
data = merge_info(new_info=file, prev_info=data)
return data
6 changes: 3 additions & 3 deletions nn_meter/builder/nn_meter_builder.py
@@ -231,25 +231,25 @@ def build_predictor_for_kernel(kernel_type, backend, init_sample_num = 1000, fin


def build_initial_predictor_by_data(kernel_type, backend = None, init_sample_num = 20, error_threshold = 0.1, mark = '', predict_label = "latency"):
return build_predictor_for_kernel(kernel_type, backend, init_sample_num=init_sample_num, iteration=1, error_threshold=error_threshold, predict_label=predict_label, mark=f'{mark}')
return build_predictor_for_kernel(kernel_type, backend, init_sample_num=init_sample_num, iteration=1, error_threshold=error_threshold, predict_label=predict_label, mark=mark)


def build_adaptive_predictor_by_data(kernel_type, kernel_data, backend = None, finegrained_sample_num = 20, error_threshold = 0.1, mark = '', predict_label = "latency"):
""" Run adaptive sampler in one iteration based
"""
workspace_path = builder_config.get('WORKSPACE', 'predbuild')
save_path = os.path.join(workspace_path, "results")
mark = 'finegrained' if mark == "" else f"finegrained_{mark}"

from nn_meter.builder.kernel_predictor_builder import build_predictor_by_data, collect_kernel_data
_, _, error_configs = build_predictor_by_data(kernel_type, kernel_data, backend = backend, error_threshold=error_threshold, save_path=None, predict_label=predict_label)
new_kernel_data = sample_and_profile_kernel_data(kernel_type, finegrained_sample_num, backend,
sampling_mode='finegrained', configs=error_configs, mark=mark)

# merge finegrained data with previous data and build new regression model
mark = mark if mark == "" else "_" + mark
kernel_data = merge_info(new_info=new_kernel_data, prev_info=collect_kernel_data(kernel_data))
predictor, acc10, error_configs = build_predictor_by_data(kernel_type, kernel_data, backend, error_threshold=error_threshold,
mark=f'finegrained{mark}', save_path=save_path, predict_label=predict_label)
mark=mark, save_path=save_path, predict_label=predict_label)
logging.keyinfo(f'{mark}: acc10 {acc10}, error_configs number: {len(error_configs)}')
return predictor, kernel_data

5 changes: 4 additions & 1 deletion tests/unit_test/test_nn_modules.py
@@ -7,7 +7,7 @@

if __name__ == '__main__':
config = {
"HW": 1,
"HW": 24,
"CIN": 144,
"COUT": 32,
"KERNEL_SIZE": 1,
@@ -19,9 +19,12 @@
"CIN3": 12,
"CIN4": 12
}
# test tensorflow kernels, tensorflow==2.7.0 or 2.6.0 is needed
from nn_meter.builder.nn_modules.tf_networks import blocks
for kernel in kernels:
getattr(blocks, kernel)(config).test_block()

# test torch kernels, torch==1.10.0 or 1.9.0 is needed
from nn_meter.builder.nn_modules.torch_networks import blocks
for kernel in kernels:
getattr(blocks, kernel)(config).test_block()
