Skip to content

Commit

Permalink
[Feature] support debug mode
Browse files Browse the repository at this point in the history
  • Loading branch information
BIGWangYuDong committed Nov 5, 2023
1 parent 0ac44e1 commit 623686d
Show file tree
Hide file tree
Showing 5 changed files with 86 additions and 4 deletions.
2 changes: 1 addition & 1 deletion configs/detection/_base_/default_runtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
timer=dict(type='IterTimerHook'),
logger=dict(type='LoggerHook', interval=50),
param_scheduler=dict(type='ParamSchedulerHook'),
checkpoint=dict(type='CheckpointHook', interval=1, max_keep_ckpts=1),
checkpoint=dict(type='CheckpointHook', interval=1),
sampler_seed=dict(type='DistSamplerSeedHook'),
visualization=dict(type='DetVisualizationHook'))

Expand Down
3 changes: 2 additions & 1 deletion lqit/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from .logger import get_caller_name, log_img_scale, print_colored_log
from .memory import AvoidCUDAOOM, AvoidOOM
from .process_debug import process_debug_mode
from .setup_env import (register_all_modules, setup_cache_size_limit_of_dynamo,
setup_multi_processes)
from .typing_utils import (ConfigType, InstanceList, MultiConfig,
Expand All @@ -11,5 +12,5 @@
'ConfigType', 'InstanceList', 'MultiConfig', 'OptConfigType',
'OptInstanceList', 'OptMultiConfig', 'OptPixelList', 'PixelList',
'RangeType', 'get_caller_name', 'log_img_scale', 'AvoidCUDAOOM',
'AvoidOOM', 'setup_cache_size_limit_of_dynamo'
'AvoidOOM', 'setup_cache_size_limit_of_dynamo', 'process_debug_mode'
]
52 changes: 52 additions & 0 deletions lqit/utils/process_debug.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import warnings
from typing import Union

from mmengine.config import ConfigDict
from mmengine.dataset import (ClassBalancedDataset, ConcatDataset,
DefaultSampler, InfiniteSampler, RepeatDataset)

# Type alias for configs accepted and returned by this module: either a
# plain ``dict`` or an mmengine ``ConfigDict``.
ConfigType = Union[dict, ConfigDict]


def process_debug_mode(cfg: ConfigType) -> ConfigType:
    """Adjust a config for quick, debug-friendly runs.

    For every dataloader present this truncates the dataset to 10 samples,
    disables worker subprocesses and shuffling; globally it fixes the random
    seed, enables deterministic mode, raises the log level to ``DEBUG`` and
    keeps only a single checkpoint.

    Args:
        cfg (dict or :obj:`ConfigDict`): Config dict. Modified in place.

    Returns:
        dict or :obj:`ConfigDict`: The updated config dict (same object).
    """
    dataloader_list = ['train_dataloader', 'val_dataloader', 'test_dataloader']
    for dataloader_name in dataloader_list:
        dataloader_cfg = cfg.get(dataloader_name)
        if dataloader_cfg is None:
            # e.g. a test-only config may not define every dataloader
            continue

        dataset_type = dataloader_cfg['dataset']['type']
        if dataset_type in \
                ['ConcatDataset', 'RepeatDataset', 'ClassBalancedDataset',
                 ConcatDataset, RepeatDataset, ClassBalancedDataset]:
            # dataset wrappers do not accept `indices`, so leave them intact
            warnings.warn(f'{dataset_type} not support in debug mode, skip.')
        else:
            # set dataset.indices = 10: only load the first 10 samples so
            # one epoch finishes almost instantly
            dataloader_cfg['dataset']['indices'] = 10

        # single-process data loading is far easier to step through in a
        # debugger and avoids worker-related crashes masking real errors
        dataloader_cfg['num_workers'] = 0
        dataloader_cfg['persistent_workers'] = False

        # set shuffle = False so samples arrive in a reproducible order
        sampler_cfg = dataloader_cfg.get('sampler')
        if sampler_cfg is not None and sampler_cfg['type'] in \
                ['DefaultSampler', 'InfiniteSampler',
                 DefaultSampler, InfiniteSampler]:
            sampler_cfg['shuffle'] = False

    # fix seed = 0 and deterministic = True for run-to-run reproducibility;
    # create the `randomness` section if the config does not define one
    if 'randomness' not in cfg:
        cfg['randomness'] = dict()
    cfg['randomness']['seed'] = 0
    cfg['randomness']['deterministic'] = True

    # verbose logging helps when diagnosing issues
    cfg['log_level'] = 'DEBUG'

    # set max_keep_ckpts = 1 to avoid filling the disk during debug runs;
    # skip quietly when the config defines no checkpoint hook
    default_hooks = cfg.get('default_hooks')
    if default_hooks is not None and \
            default_hooks.get('checkpoint') is not None:
        default_hooks['checkpoint']['max_keep_ckpts'] = 1

    return cfg
16 changes: 15 additions & 1 deletion tools/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
initialize_monitor_manager)
from lqit.common.utils.process_lark_hook import process_lark_hook
from lqit.registry import RUNNERS
from lqit.utils import setup_cache_size_limit_of_dynamo
from lqit.utils import process_debug_mode, setup_cache_size_limit_of_dynamo


# TODO: support fuse_conv_bn and format_only
Expand Down Expand Up @@ -58,6 +58,13 @@ def parse_args():
default='configs/lark/lark.py',
type=str,
help='lark bot config file path')
parser.add_argument(
'--debug',
default=False,
action='store_true',
help='Debug mode, used for code debugging, specifically, turning '
'data processing into single process (`num_workers`), adding '
'`indices=10` in datasets, and other debug-friendly settings.')
# When using PyTorch version >= 2.0.0, the `torch.distributed.launch`
# will pass the `--local-rank` parameter to `tools/train.py` instead
# of `--local_rank`.
Expand Down Expand Up @@ -99,6 +106,13 @@ def main(args):
if args.cfg_options is not None:
cfg.merge_from_dict(args.cfg_options)

# process debug mode if args.debug is True
if args.debug:
# force set args.lark = False
args.lark = False
# set necessary params for debug mode
cfg = process_debug_mode(cfg)

# work_dir is determined in this priority: CLI > segment in file > filename
if args.work_dir is not None:
# update configs according to CLI args if args.work_dir is not None
Expand Down
17 changes: 16 additions & 1 deletion tools/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@
get_error_message,
initialize_monitor_manager)
from lqit.common.utils.process_lark_hook import process_lark_hook
from lqit.utils import print_colored_log, setup_cache_size_limit_of_dynamo
from lqit.utils import (print_colored_log, process_debug_mode,
setup_cache_size_limit_of_dynamo)


def parse_args():
Expand Down Expand Up @@ -63,6 +64,13 @@ def parse_args():
default='configs/lark/lark.py',
type=str,
help='lark bot config file path')
parser.add_argument(
'--debug',
default=False,
action='store_true',
help='Debug mode, used for code debugging, specifically, turning '
'data processing into single process (`num_workers`), adding '
'`indices=10` in datasets, and other debug-friendly settings.')
# When using PyTorch version >= 2.0.0, the `torch.distributed.launch`
# will pass the `--local-rank` parameter to `tools/train.py` instead
# of `--local_rank`.
Expand All @@ -85,6 +93,13 @@ def main(args):
if args.cfg_options is not None:
cfg.merge_from_dict(args.cfg_options)

# process debug mode if args.debug is True
if args.debug:
# force set args.lark = False
args.lark = False
# set necessary params for debug mode
cfg = process_debug_mode(cfg)

# work_dir is determined in this priority: CLI > segment in file > filename
if args.work_dir is not None:
# update configs according to CLI args if args.work_dir is not None
Expand Down

0 comments on commit 623686d

Please sign in to comment.