Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

rename module view to lightning view when enable pytorch lightning #539

Merged
merged 1 commit into from
Feb 21, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions tb_plugin/fe/src/app.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,8 @@ export enum Views {
Trace = 'Trace',
Distributed = 'Distributed',
Memory = 'Memory',
Module = 'Module'
Module = 'Module',
Lightning = 'Lightning'
}

const ViewNames = {
Expand All @@ -58,7 +59,8 @@ const ViewNames = {
[Views.Trace]: Views.Trace,
[Views.Distributed]: Views.Distributed,
[Views.Memory]: Views.Memory,
[Views.Module]: Views.Module
[Views.Module]: Views.Module,
[Views.Lightning]: Views.Lightning
}

const drawerWidth = 340
Expand Down Expand Up @@ -407,6 +409,7 @@ export const App = () => {
case Views.Memory:
return <MemoryView run={run} worker={worker} span={span} />
case Views.Module:
case Views.Lightning:
return <ModuleView run={run} worker={worker} span={span} />
}
} else {
Expand Down
1 change: 1 addition & 0 deletions tb_plugin/torch_tb_profiler/consts.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
DISTRIBUTED_VIEW = View(5, 'distributed', 'Distributed')
MEMORY_VIEW = View(6, 'memory', 'Memory')
MODULE_VIEW = View(7, 'module', 'Module')
LIGHTNING_VIEW = View(8, 'lightning', 'Lightning')

TOOLTIP_GPU_UTIL = \
'GPU Utilization:\n' \
Expand Down
7 changes: 4 additions & 3 deletions tb_plugin/torch_tb_profiler/profiler/module_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
from collections import namedtuple
from typing import Dict, Generator, Iterable, List, Optional, Set, Tuple, Union

from .node import (DataLoaderNode, ModuleNode, OperatorNode, PLModuleNode,
ProfilerStepNode, is_operator_node)
from .node import (DataLoaderNode, ModuleNode, OperatorNode, OptimizerNode,
PLModuleNode, ProfilerStepNode, is_operator_node)
from .trace import BaseEvent, EventTypes, PLModuleEvent, PythonFunctionEvent


Expand Down Expand Up @@ -186,7 +186,8 @@ def _aggregate_modules(modules: Iterable[Union[ModuleNode, PLModuleNode]]) -> Di
def _get_node_list(tid2tree: Dict[int, OperatorNode], node_class) -> Generator[OperatorNode, None, None]:
"""Get all node with node_class from the operator tree"""
def traverse_node(node):
if type(node) not in (ProfilerStepNode, ModuleNode, OperatorNode, PLModuleNode, DataLoaderNode):
# Check OptimizerNode here because in PyTorch Lightning, PLModuleNode is under OptimizerNode.
if type(node) not in (ProfilerStepNode, ModuleNode, OperatorNode, OptimizerNode, PLModuleNode, DataLoaderNode):
guyang3532 marked this conversation as resolved.
Show resolved Hide resolved
return

if isinstance(node, node_class):
Expand Down
4 changes: 3 additions & 1 deletion tb_plugin/torch_tb_profiler/profiler/run_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,9 @@ def generate_run_profile(self):

profile_run.module_stats = aggegate_module_view(self.profile_data.tid2tree, self.profile_data.events)
profile_run.pl_module_stats = aggegate_pl_module_view(self.profile_data.tid2tree, self.profile_data.events)
if profile_run.module_stats or (profile_run.is_pytorch_lightning and profile_run.pl_module_stats):
if profile_run.is_pytorch_lightning and profile_run.pl_module_stats:
profile_run.views.append(consts.LIGHTNING_VIEW)
elif profile_run.module_stats:
profile_run.views.append(consts.MODULE_VIEW)

return profile_run
Expand Down
2 changes: 1 addition & 1 deletion tb_plugin/torch_tb_profiler/static/index.html

Large diffs are not rendered by default.