Commit e4d28f3
Add debug log support (modelscope#1226)
1 parent 73879a8
4 files changed (+19 -13 lines)

swift/llm/utils/model.py (+5)

@@ -910,9 +910,14 @@ def get_model_tokenizer_from_repo(model_dir: str,
                 trust_remote_code=True,
             )
         else:
+            logger.info(f'Model loading with args: model_dir: {model_dir},'
+                        f'torch_dtype: {torch_dtype},'
+                        f'model_kwargs: {model_kwargs}')
             with context:
                 model = automodel_class.from_pretrained(
                     model_dir, config=model_config, torch_dtype=torch_dtype, trust_remote_code=True, **model_kwargs)
+            if hasattr(model, 'hf_device_map'):
+                logger.debug(f'Model hf_device_map: {model.hf_device_map}')
     return model, tokenizer
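Both additions are ordinary Python logging calls; the logger.debug line is invisible at the default INFO level and only shows up once the level is DEBUG (wired to the LOG_LEVEL variable in the two files below). A minimal standalone sketch of the pattern, with hypothetical values standing in for swift's get_logger and the real transformers model:

    import logging

    logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger('swift')
    logger.setLevel(logging.DEBUG)  # what LOG_LEVEL=DEBUG resolves to

    model_dir = '/cache/qwen-7b-chat'      # hypothetical value
    torch_dtype = 'torch.bfloat16'         # hypothetical value
    model_kwargs = {'device_map': 'auto'}  # hypothetical value

    logger.info(f'Model loading with args: model_dir: {model_dir},'
                f'torch_dtype: {torch_dtype},'
                f'model_kwargs: {model_kwargs}')

    class FakeModel:
        # stand-in: transformers models gain hf_device_map when loaded with device_map
        hf_device_map = {'': 0}

    model = FakeModel()
    if hasattr(model, 'hf_device_map'):
        logger.debug(f'Model hf_device_map: {model.hf_device_map}')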

swift/llm/utils/utils.py (+2 -3)

@@ -45,11 +45,10 @@

 logger.handlers[0].setFormatter(logger_format)
 ms_logger.handlers[0].setFormatter(logger_format)
+log_level = os.getenv('LOG_LEVEL', 'INFO').upper()
 if is_local_master():
-    logger.setLevel(logging.INFO)
-    ms_logger.setLevel(logging.INFO)
+    ms_logger.setLevel(log_level)
 else:
-    logger.setLevel(logging.ERROR)
     ms_logger.setLevel(logging.ERROR)

 os.environ['TOKENIZERS_PARALLELISM'] = 'true'
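Logger.setLevel accepts a level name string as well as a number, which is why the LOG_LEVEL value can be passed straight through here. A short sketch of that behaviour (note an unknown name such as 'TRACE' would raise ValueError, unlike the getattr fallback in logger.py below):

    import logging
    import os

    os.environ['LOG_LEVEL'] = 'debug'                   # hypothetical setting
    log_level = os.getenv('LOG_LEVEL', 'INFO').upper()  # -> 'DEBUG'

    ms_logger = logging.getLogger('modelscope')
    ms_logger.setLevel(log_level)                       # string form is accepted
    print(ms_logger.level == logging.DEBUG)             # True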

swift/utils/logger.py (+6 -9)

@@ -9,12 +9,7 @@
 formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')


-def is_master():
-    rank = int(os.getenv('RANK', -1))
-    return rank in {-1, 0}
-
-
-def get_logger(log_file: Optional[str] = None, log_level: int = logging.INFO, file_mode: str = 'w'):
+def get_logger(log_file: Optional[str] = None, log_level: Optional[int] = None, file_mode: str = 'w'):
     """ Get logging logger

     Args:
@@ -24,7 +19,9 @@ def get_logger(log_file: Optional[str] = None, log_level: int = logging.INFO, file_mode: str = 'w'):
         file_mode: Specifies the mode to open the file, if filename is
             specified (if filemode is unspecified, it defaults to 'w').
     """
-
+    if log_level is None:
+        log_level = os.getenv('LOG_LEVEL', 'INFO').upper()
+        log_level = getattr(logging, log_level, logging.INFO)
     logger_name = __name__.split('.')[0]
     logger = logging.getLogger(logger_name)
     logger.propagate = False
@@ -47,7 +44,7 @@ def get_logger(log_file: Optional[str] = None, log_level: Optional[int] = None, file_mode: str = 'w'):
     handlers = [stream_handler]

     if importlib.util.find_spec('torch') is not None:
-        is_worker0 = is_master()
+        is_worker0 = int(os.getenv('LOCAL_RANK', -1)) in {-1, 0}
     else:
         is_worker0 = True

@@ -76,7 +73,7 @@ def add_file_handler_if_needed(logger, log_file, file_mode, log_level):
         return

     if importlib.util.find_spec('torch') is not None:
-        is_worker0 = is_master()
+        is_worker0 = int(os.getenv('LOCAL_RANK', -1)) in {-1, 0}
     else:
         is_worker0 = True
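When no explicit level is passed, get_logger now resolves one from the environment. The stdlib logging module exposes DEBUG/INFO/WARNING/ERROR as integer module attributes, so getattr maps the LOG_LEVEL string to a numeric level, and unknown names fall back to INFO rather than raising:

    import logging
    import os

    for name in ('DEBUG', 'INFO', 'TRACE'):  # 'TRACE' is not a stdlib level
        os.environ['LOG_LEVEL'] = name
        raw = os.getenv('LOG_LEVEL', 'INFO').upper()
        level = getattr(logging, raw, logging.INFO)
        print(raw, level)  # DEBUG 10, INFO 20, TRACE 20 (fallback)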

swift/utils/torch_utils.py (+6 -1)

@@ -13,7 +13,7 @@
 from torch.nn import Module
 from transformers.utils import is_torch_npu_available, strtobool

-from .logger import get_logger, is_master
+from .logger import get_logger

 logger = get_logger()

@@ -82,6 +82,11 @@ def is_local_master():
     return local_rank in {-1, 0}


+def is_master():
+    rank = int(os.getenv('RANK', -1))
+    return rank in {-1, 0}
+
+
 def use_torchacc() -> bool:
     return strtobool(os.getenv('USE_TORCHACC', '0'))
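is_master moves here next to the existing is_local_master; the only difference is which launcher variable each reads. Under torchrun, RANK is the global rank across all nodes and LOCAL_RANK the rank within one node, so in a multi-node job every node has a local master but only global rank 0 is the master. A small self-contained sketch:

    import os

    def is_master():
        rank = int(os.getenv('RANK', -1))
        return rank in {-1, 0}

    def is_local_master():
        local_rank = int(os.getenv('LOCAL_RANK', -1))
        return local_rank in {-1, 0}

    # e.g. the first process on node 1 of a 2-node x 2-GPU job:
    os.environ.update(RANK='2', LOCAL_RANK='0')
    print(is_master(), is_local_master())  # False True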
