 # See the License for the specific language governing permissions
 # and limitations under the License.

-import inspect
+import io
 import json
 import os
-import subprocess  # nosec
-import sys
 import tempfile
-from shutil import copyfile, copytree
 from typing import Any, Dict, List, Optional
 from zipfile import ZipFile

 import numpy as np
+import ote_anomalib.exportable_code
 from addict import Dict as ADDict
 from anomalib.deploy import OpenVINOInferencer
 from anomalib.post_processing import anomaly_map_to_color_map
     contains_anomalous_images,
     split_local_global_resultset,
 )
-from ote_anomalib.exportable_code import (
-    AnomalyBase,
-    AnomalyClassification,
-    AnomalyDetection,
-    AnomalySegmentation,
-)
 from ote_anomalib.logging import get_logger
 from ote_sdk.entities.datasets import DatasetEntity
 from ote_sdk.entities.inference_parameters import (
@@ -307,11 +299,17 @@ def optimize(
         if get_nodes_by_type(model, ["FakeQuantize"]):
             raise RuntimeError("Model is already optimized by POT")

+        if optimization_parameters is not None:
+            optimization_parameters.update_progress(10)
+
         engine = IEEngine(config=ADDict({"device": "CPU"}), data_loader=data_loader, metric=None)
         pipeline = create_pipeline(algo_config=self._get_optimization_algorithms_configs(), engine=engine)
         compressed_model = pipeline.run(model)
         compress_model_weights(compressed_model)

+        if optimization_parameters is not None:
+            optimization_parameters.update_progress(90)
+
         with tempfile.TemporaryDirectory() as tempdir:
             save_model(compressed_model, tempdir, model_name="model")
             self.__load_weights(path=os.path.join(tempdir, "model.xml"), output_model=output_model, key="openvino.xml")
@@ -330,6 +328,10 @@ def optimize(
         self.task_environment.model = output_model
         self.inferencer = self.load_inferencer()

+        if optimization_parameters is not None:
+            optimization_parameters.update_progress(100)
+        logger.info("POT optimization completed")
+
     def load_inferencer(self) -> OpenVINOInferencer:
         """
         Create the OpenVINO inferencer object
@@ -422,57 +424,26 @@ def deploy(self, output_model: ModelEntity) -> None:

         task_type = str(self.task_type).lower()

-        if self.task_type == TaskType.ANOMALY_CLASSIFICATION:
-            selected_class = AnomalyClassification
-        elif self.task_type == TaskType.ANOMALY_DETECTION:
-            selected_class = AnomalyDetection
-        elif self.task_type == TaskType.ANOMALY_SEGMENTATION:
-            selected_class = AnomalySegmentation
-        else:
-            raise ValueError(
-                f"{self.task_type} is not supported. "
-                "Only Anomaly <Classification, Detection, Segmentation> are supported"
-            )
-
         parameters["type_of_model"] = task_type
         parameters["converter_type"] = task_type.upper()
         parameters["model_parameters"] = self._get_openvino_configuration()
-        name_of_package = "demo_package"
-
-        with tempfile.TemporaryDirectory() as tempdir:
-            copyfile(os.path.join(work_dir, "setup.py"), os.path.join(tempdir, "setup.py"))
-            copyfile(os.path.join(work_dir, "requirements.txt"), os.path.join(tempdir, "requirements.txt"))
-            copytree(os.path.join(work_dir, name_of_package), os.path.join(tempdir, name_of_package))
-            config_path = os.path.join(tempdir, name_of_package, "config.json")
-            with open(config_path, "w", encoding="utf-8") as file:
-                json.dump(parameters, file, ensure_ascii=False, indent=4)
-
-            copyfile(inspect.getfile(selected_class), os.path.join(tempdir, name_of_package, "model.py"))
-            copyfile(inspect.getfile(AnomalyBase), os.path.join(tempdir, name_of_package, "base.py"))
-
-            # create wheel package
-            subprocess.run(
-                [
-                    sys.executable,
-                    os.path.join(tempdir, "setup.py"),
-                    "bdist_wheel",
-                    "--dist-dir",
-                    tempdir,
-                    "clean",
-                    "--all",
-                ],
-                check=True,
-            )
-            wheel_file_name = [f for f in os.listdir(tempdir) if f.endswith(".whl")][0]
-
-            with ZipFile(os.path.join(tempdir, "openvino.zip"), "w") as arch:
-                arch.writestr(os.path.join("model", "model.xml"), self.task_environment.model.get_data("openvino.xml"))
-                arch.writestr(os.path.join("model", "model.bin"), self.task_environment.model.get_data("openvino.bin"))
-                arch.write(os.path.join(tempdir, "requirements.txt"), os.path.join("python", "requirements.txt"))
-                arch.write(os.path.join(work_dir, "README.md"), os.path.join("python", "README.md"))
-                arch.write(os.path.join(work_dir, "LICENSE"), os.path.join("python", "LICENSE"))
-                arch.write(os.path.join(work_dir, "demo.py"), os.path.join("python", "demo.py"))
-                arch.write(os.path.join(tempdir, wheel_file_name), os.path.join("python", wheel_file_name))
-            with open(os.path.join(tempdir, "openvino.zip"), "rb") as output_arch:
-                output_model.exportable_code = output_arch.read()
+        zip_buffer = io.BytesIO()
+        with ZipFile(zip_buffer, "w") as arch:
+            # model files
+            arch.writestr(os.path.join("model", "model.xml"), self.task_environment.model.get_data("openvino.xml"))
+            arch.writestr(os.path.join("model", "model.bin"), self.task_environment.model.get_data("openvino.bin"))
+            arch.writestr(os.path.join("model", "config.json"), json.dumps(parameters, ensure_ascii=False, indent=4))
+            # model_wrappers files
+            for root, _, files in os.walk(os.path.dirname(ote_anomalib.exportable_code.__file__)):
+                for file in files:
+                    file_path = os.path.join(root, file)
+                    arch.write(
+                        file_path, os.path.join("python", "model_wrappers", file_path.split("exportable_code/")[1])
+                    )
+            # other python files
+            arch.write(os.path.join(work_dir, "requirements.txt"), os.path.join("python", "requirements.txt"))
+            arch.write(os.path.join(work_dir, "LICENSE"), os.path.join("python", "LICENSE"))
+            arch.write(os.path.join(work_dir, "README.md"), os.path.join("python", "README.md"))
+            arch.write(os.path.join(work_dir, "demo.py"), os.path.join("python", "demo.py"))
+        output_model.exportable_code = zip_buffer.getvalue()
         logger.info("Deployment completed.")
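
A minimal sketch, not part of the patch, of how the exportable-code archive written by the new `deploy()` could be inspected on the consumer side. It assumes only the layout produced by the `writestr`/`write` calls above (`model/model.xml`, `model/model.bin`, `model/config.json`, `python/...`); the file name `openvino.zip` and variable names are illustrative.

```python
import io
import json
from zipfile import ZipFile

# Bytes from output_model.exportable_code, here assumed to have been dumped to disk.
with open("openvino.zip", "rb") as dump:
    exportable_code_bytes = dump.read()

with ZipFile(io.BytesIO(exportable_code_bytes)) as arch:
    # Expected entries: model/model.xml, model/model.bin, model/config.json, python/demo.py, ...
    print(arch.namelist())
    config = json.loads(arch.read("model/config.json"))
    # "type_of_model" and "converter_type" are set by deploy() above.
    print(config["type_of_model"], config["converter_type"])
```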