Moved Apply NN to images project to engine 2.0 #44

Merged: 19 commits, Feb 12, 2024
3 changes: 3 additions & 0 deletions .flake8
@@ -0,0 +1,3 @@
[flake8]
max-line-length = 100
ignore = E203, E501, W503, E722, W605
2 changes: 1 addition & 1 deletion .github/workflows/publish.yml
@@ -16,4 +16,4 @@ jobs:
RELEASE_VERSION: ""
RELEASE_DESCRIPTION: ""
RELEASE_TYPE: "publish"
SUBAPP_PATHS: "annotation-tool, project-dataset"
SUBAPP_PATHS: "annotation-tool, project_dataset"
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -22,4 +22,4 @@ jobs:
RELEASE_VERSION: "${{ github.event.release.tag_name }}"
RELEASE_DESCRIPTION: "${{ github.event.release.name }}"
RELEASE_TYPE: "release"
SUBAPP_PATHS: "annotation-tool, project-dataset"
SUBAPP_PATHS: "annotation-tool, project_dataset"
2 changes: 1 addition & 1 deletion .github/workflows/release_branch.yml
@@ -20,4 +20,4 @@ jobs:
RELEASE_VERSION: "${{ github.ref_name }}"
RELEASE_DESCRIPTION: "'${{ github.ref_name }}' branch release"
RELEASE_TYPE: "release-branch"
SUBAPP_PATHS: "annotation-tool, project-dataset"
SUBAPP_PATHS: "annotation-tool, project_dataset"
1 change: 1 addition & 0 deletions .gitignore
@@ -7,3 +7,4 @@ shared_utils/__pycache__
project-dataset/src/__pycache__
.DS_Store
debug
__pycache__/
24 changes: 23 additions & 1 deletion .vscode/launch.json
@@ -11,6 +11,28 @@
"env": {
"PYTHONPATH": "${workspaceFolder}${pathSeparator}${env:PYTHONPATH}"
}
}
},
{
"name": "UI app",
"type": "python",
"request": "launch",
"module": "uvicorn",
"args": [
"project_dataset.src.main:app",
"--host",
"0.0.0.0",
"--port",
"8000",
"--ws",
"websockets",
"--reload"
],
"jinja": true,
"justMyCode": true,
"env": {
"PYTHONPATH": "${workspaceFolder}:${PYTHONPATH}",
"LOG_LEVEL": "DEBUG"
}
},
]
}
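Note (not part of the diff): the "UI app" launch configuration above is effectively a uvicorn invocation, and a minimal sketch of the same launch done programmatically looks as follows, assuming the repository root is the working directory and on PYTHONPATH (the file name run_debug.py is hypothetical):

# run_debug.py (hypothetical): mirrors the "UI app" VS Code launch configuration.
import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "project_dataset.src.main:app",  # same ASGI app target as in launch.json
        host="0.0.0.0",
        port=8000,
        ws="websockets",  # same websocket implementation as the --ws flag
        reload=True,  # hot reload, as --reload in the launch config
    )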
3 changes: 3 additions & 0 deletions .vscode/settings.json
@@ -19,6 +19,9 @@
    },
    "[python]": {
        "editor.defaultFormatter": "ms-python.black-formatter",
        "editor.formatOnSave": true,
        "editor.codeActionsOnSave": {
            "source.organizeImports": "explicit"
        },
        "debug.inlineValues": "off",
        "python.analysis.typeCheckingMode": "off",
28 changes: 28 additions & 0 deletions create_venv.sh
@@ -0,0 +1,28 @@
#!/bin/bash

# Checking if .venv dir already exists.
if [ -d ".venv" ]; then
echo "VENV dir (.venv) already exists, it will be removed."
rm -rf .venv
fi

echo "VENV will be created"

# Checking if python3.8 is available in PATH.
if command -v python3.8 &>/dev/null; then
python_executable="python3.8" && \
echo "Python 3.8 found, it will be used for creating VENV dir."
else
python_executable="python3" && \
echo "Python 3.8 not found, default python3 will be used for creating VENV dir."
fi

# Creating VENV dir with selected python executable.
$python_executable -m venv .venv && \
source .venv/bin/activate && \

# Installing requirements from requirements.txt.
echo "Install dev_requirements..." && \
pip3 install -r dev_requirements.txt && \
echo "Requirements have been successfully installed, VENV ready." && \
deactivate
1 change: 0 additions & 1 deletion project-dataset/.gitignore

This file was deleted.

5 changes: 0 additions & 5 deletions project-dataset/secret_debug.env

This file was deleted.

2 changes: 2 additions & 0 deletions project_dataset/.gitignore
@@ -0,0 +1,2 @@
/secret_debug.env
temp/
File renamed without changes.
6 changes: 3 additions & 3 deletions project-dataset/config.json → project_dataset/config.json
@@ -1,6 +1,7 @@
{
    "name": "Apply NN to Images Project",
    "type": "app",
    "version": "2.0.0",
    "categories": [
        "neural network",
        "images",
@@ -11,10 +12,9 @@
        "labeling"
    ],
    "description": "NN Inference on images in project or dataset",
    "docker_image": "supervisely/labeling:6.73.22",
    "docker_image": "supervisely/labeling:6.72.234",
    "min_instance_version": "6.8.73",
    "main_script": "project-dataset/src/main.py",
    "gui_template": "project-dataset/src/gui.html",
    "entrypoint": "python -m uvicorn project_dataset.src.main:app --host 0.0.0.0 --port 8000",
    "task_location": "workspace_tasks",
    "isolate": true,
    "headless": false,
File renamed without changes.
4 changes: 4 additions & 0 deletions project_dataset/local.env
@@ -0,0 +1,4 @@
TEAM_ID=448
WORKSPACE_ID=690
PROJECT_ID=34969
DATASET_ID=85450
53 changes: 53 additions & 0 deletions project_dataset/src/globals.py
@@ -0,0 +1,53 @@
import os

import supervisely as sly
from dotenv import load_dotenv

ABSOLUTE_PATH = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.dirname(ABSOLUTE_PATH)
STATIC_DIR = os.path.join(PARENT_DIR, "temp")
sly.fs.mkdir(STATIC_DIR)

if sly.is_development():
    load_dotenv(os.path.join(PARENT_DIR, "local.env"))
    load_dotenv(os.path.expanduser("~/supervisely.env"))

team_id = sly.env.team_id()
workspace_id = sly.env.workspace_id()
project_id = sly.env.project_id(raise_not_found=False)
dataset_id = sly.env.dataset_id(raise_not_found=False)

sly.logger.info(
    f"TEAM_ID: {team_id}, WORKSPACE_ID: {workspace_id}, PROJECT_ID: {project_id}, DATASET_ID: {dataset_id}"
)

api = sly.Api.from_env()
if dataset_id:
    dataset_info = api.dataset.get_info_by_id(dataset_id)
    project_id = dataset_info.project_id

# region ui-settings
selected_project = None
selected_datasets = None
model_session_id = None
model_meta = None
inference_settings = None
# endregion

# region ui-constants
deployed_nn_tags = ["deployed_nn"]
inference_modes = ["full image", "sliding window"]
add_predictions_modes = ["merge with existing labels", "replace existing labels"]
# endregion

# region caches
input_images = None
project_info = api.project.get_info_by_id(project_id)
project_meta = sly.ProjectMeta.from_json(api.project.get_meta(project_id))
# endregion

# region sliding window parameters
det_model_meta = None
model_info = None
inference_request_uuid = None
# endregion
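Note (not part of the diff): sly.Api.from_env() above expects the server address and API token to be available in the environment, which in development comes from the ~/supervisely.env file loaded a few lines earlier. A minimal sketch of constructing the client explicitly instead, with placeholder values:

import supervisely as sly

# Placeholders: substitute your own instance address and API token.
api = sly.Api(server_address="https://app.supervisely.com", token="<your-api-token>")

# Same kind of lookup globals.py performs for the project cache.
project_info = api.project.get_info_by_id(34969)  # PROJECT_ID from local.env above
print(project_info.name)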
23 changes: 23 additions & 0 deletions project_dataset/src/main.py
@@ -0,0 +1,23 @@
import supervisely as sly
from supervisely.app.widgets import Container

import project_dataset.src.globals as g
import project_dataset.src.ui.connect_nn as connect_nn
import project_dataset.src.ui.inference_preview as inference_preview
import project_dataset.src.ui.inference_settings as inference_settings
import project_dataset.src.ui.input_data as input_data
import project_dataset.src.ui.nn_info as nn_info
import project_dataset.src.ui.output_data as output_data

layout = Container(
    widgets=[
        input_data.card,
        connect_nn.card,
        nn_info.card,
        inference_settings.card,
        inference_preview.card,
        output_data.card,
    ],
)

app = sly.Application(layout=layout, static_dir=g.STATIC_DIR)
152 changes: 152 additions & 0 deletions project_dataset/src/ui/connect_nn.py
@@ -0,0 +1,152 @@
import supervisely as sly
import yaml
from supervisely.app.widgets import Button, Card, Container, ModelInfo, SelectAppSession, Text

import project_dataset.src.globals as g
import project_dataset.src.ui.inference_preview as inference_preview
import project_dataset.src.ui.inference_settings as inference_settings
import project_dataset.src.ui.nn_info as nn_info
import project_dataset.src.ui.output_data as output_data

select_session = SelectAppSession(g.team_id, g.deployed_nn_tags)
connect_button = Button("Connect", icon="zmdi zmdi-check")
disconnect_button = Button("Disconnect", icon="zmdi zmdi-close")
disconnect_button.hide()
error_text = Text(status="warning")
error_text.hide()

model_info = ModelInfo()
model_info.hide()

card = Card(
    "2️⃣ Choose model",
    "Select a deployed neural network to start and click on Select button.",
    content=Container([select_session, connect_button, model_info, disconnect_button, error_text]),
    collapsable=True,
    lock_message="Select a project or a dataset on step 1️⃣.",
)
card.collapse()
card.lock()


@connect_button.click
def model_selected() -> None:
    """Connects to the selected model session and changes the UI state."""
    g.model_session_id = select_session.get_selected_id()
    if g.model_session_id is None:
        error_text.text = "No model was selected, please select a model and try again."
        error_text.show()
        return

    connect_status = connect_to_model()
    if not connect_status:
        error_text.text = (
            "Couldn't connect to the model. Make sure that model is deployed and try again."
        )
        error_text.show()
        return

    g.model_meta = get_model_meta()
    g.inference_settings = get_inference_settings()
    inference_settings.additional_settings.set_text(g.inference_settings["settings"])

    error_text.hide()
    model_info.set_session_id(g.model_session_id)
    model_info.show()
    disconnect_button.show()

    connect_button.hide()
    select_session.hide()

    nn_info.load_classes()
    nn_info.load_tags()

    nn_info.card.unlock()
    nn_info.card.uncollapse()

    inference_settings.card.unlock()
    inference_settings.card.uncollapse()

    inference_preview.card.unlock()
    inference_preview.card.uncollapse()

    inference_preview.random_image_checkbox.enable()
    inference_preview.preview_button.enable()

    output_data.card.unlock()
    output_data.card.uncollapse()


@disconnect_button.click
def model_changed() -> None:
    """Changes the UI state when the model is changed."""
    connect_button.show()
    select_session.show()

    disconnect_button.hide()
    g.model_session_id = None
    g.model_meta = None
    sly.logger.info(f"Change button was clicked. Model session: {g.model_session_id}")
    model_info.hide()

    nn_info.card.lock()
    nn_info.card.collapse()

    inference_settings.card.lock()
    inference_settings.card.collapse()

    inference_preview.card.lock()
    inference_preview.card.collapse()

    inference_preview.random_image_checkbox.disable()
    inference_preview.preview_button.disable()

    output_data.card.lock()
    output_data.card.collapse()


def connect_to_model() -> bool:
    """Connects to the selected model session.

    :return: True if the connection was successful, False otherwise.
    :rtype: bool
    """
    try:
        session_info = g.api.task.send_request(g.model_session_id, "get_session_info", data={})
        sly.logger.info(f"Connected to model session: {session_info}")
        return True
    except Exception as e:
        sly.logger.warning(
            f"Couldn't get model info. Make sure that model is deployed and try again. Reason: {e}"
        )
        return False


def get_model_meta() -> sly.ProjectMeta:
    """Returns model meta in Supervisely format.

    :return: Model meta in Supervisely format.
    :rtype: sly.ProjectMeta
    """
    meta_json = g.api.task.send_request(g.model_session_id, "get_output_classes_and_tags", data={})
    return sly.ProjectMeta.from_json(meta_json)


def get_inference_settings() -> dict:
    """Returns the model's custom inference settings response.
    The settings themselves are stored under the "settings" key as a string in YAML format.

    :return: Response with custom inference settings under the "settings" key.
    :rtype: dict
    """
    inference_settings = g.api.task.send_request(
        g.model_session_id, "get_custom_inference_settings", data={}
    )
    if inference_settings["settings"] is None or len(inference_settings["settings"]) == 0:
        inference_settings["settings"] = ""
        sly.logger.info("Model doesn't support custom inference settings.")
    elif isinstance(inference_settings["settings"], dict):
        inference_settings["settings"] = yaml.dump(
            inference_settings["settings"], allow_unicode=True
        )
    return inference_settings
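Usage note (not part of the diff): get_inference_settings() hands the settings to the UI as a YAML string, so a caller that later needs them as a dict can parse the editor text back. A small sketch, where settings_text stands in for whatever the settings editor widget returns:

import yaml

def settings_to_dict(settings_text: str) -> dict:
    # yaml.safe_load returns None for an empty string (model without custom settings).
    return yaml.safe_load(settings_text) or {}

# Round-trip example for YAML produced by get_inference_settings().
print(settings_to_dict("confidence_threshold: 0.5\n"))  # {'confidence_threshold': 0.5}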