
Commit 5a3efb4
Merge branch 'main' into uc-not-enabled-error
irenedea authored Oct 24, 2024
2 parents 3af7f83 + 08702d2
Showing 6 changed files with 18 additions and 9 deletions.
llmfoundry/callbacks/hf_checkpointer.py (1 addition, 0 deletions)

@@ -124,6 +124,7 @@ def _register_model_with_run_id_multiprocess(
     logging.basicConfig(
         format=
         f'%(asctime)s: rank{dist.get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+        force=True,
     )
     logging.getLogger('composer').setLevel(composer_logging_level)

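Every file in this merge applies the same one-line fix: passing force=True to logging.basicConfig. Since Python 3.8, force=True removes and closes any handlers already attached to the root logger before applying the new configuration; without it, basicConfig is a no-op whenever an earlier import or a subprocess bootstrap has already installed a handler, and the intended format silently never takes effect. A minimal standalone sketch (generic Python, not repo code):

import logging

logging.basicConfig(format='first: %(message)s')
logging.warning('one')   # prints "first: one"

# Without force=True, this second call would be silently ignored
# because the root logger already has a handler.
logging.basicConfig(format='second: %(message)s', force=True)
logging.warning('two')   # prints "second: two"
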
llmfoundry/command_utils/data_prep/convert_text_to_mds.py (1 addition, 0 deletions)

@@ -515,6 +515,7 @@ def _configure_logging(logging_level: str):
     logging.basicConfig(
         format=
         f'%(asctime)s: [%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+        force=True,
     )
     logging_level = logging_level.upper()
     logging.getLogger('llmfoundry').setLevel(logging_level)
llmfoundry/command_utils/eval.py (1 addition, 0 deletions)

@@ -272,6 +272,7 @@ def evaluate(cfg: DictConfig) -> tuple[list[Trainer], pd.DataFrame]:
         # 2022-06-29 11:22:26,152: rank0[822018][MainThread]: INFO: Message here
         format=
         f'%(asctime)s: rank{dist.get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+        force=True,
     )
     logging.getLogger('llmfoundry').setLevel(
         eval_config.python_log_level.upper(),
llmfoundry/command_utils/train.py (7 additions, 6 deletions)

@@ -227,6 +227,7 @@ def train(cfg: DictConfig) -> Trainer:
         # 2022-06-29 11:22:26,152: rank0[822018][MainThread]: INFO: Message here
         format=
         f'%(asctime)s: rank{dist.get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+        force=True,
     )
     logging.getLogger('llmfoundry').setLevel(
         train_cfg.python_log_level.upper(),
@@ -311,20 +312,20 @@ def train(cfg: DictConfig) -> Trainer:
     eval_gauntlet_config = train_cfg.eval_gauntlet or train_cfg.eval_gauntlet_str

     # Optional parameters will be set to default values if not specified.
-    run_name: Optional[
-        str] = train_cfg.run_name if train_cfg.run_name else os.environ.get(
-            'RUN_NAME',
-            None,
-        )
+    env_run_name: Optional[str] = os.environ.get('RUN_NAME', None)
+    run_name: str = (
+        train_cfg.run_name if train_cfg.run_name else env_run_name
+    ) or 'llm'
     is_state_dict_sharded: bool = (
         fsdp_config.get('state_dict_type', 'full') == 'sharded'
     ) if fsdp_config else False
     save_latest_filename: str = train_cfg.save_latest_filename if train_cfg.save_latest_filename else 'latest-sharded-rank{rank}' if is_state_dict_sharded else 'latest-rank{rank}.pt'
     save_filename: str = train_cfg.save_filename if train_cfg.save_filename else 'ep{epoch}-ba{batch}-rank{rank}.pt'

     # Enable autoresume from model checkpoints if possible
+    is_user_set_run_name: bool = train_cfg.run_name is not None or env_run_name is not None
     autoresume_default: bool = False
-    if run_name is not None and \
+    if is_user_set_run_name and \
         train_cfg.save_folder is not None \
         and not train_cfg.save_overwrite \
         and not train_cfg.save_weights_only:
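Beyond the logging fix, this hunk changes how run_name is resolved and how autoresume is gated. Previously run_name could stay None when neither the config nor the RUN_NAME environment variable supplied one, and that None check controlled autoresume; now run_name always resolves to a string, falling back to the literal 'llm', while autoresume keys off is_user_set_run_name instead, so the shared fallback name never enables autoresume by itself. A standalone sketch of the precedence (plain Python mirroring the diff, not a call into llmfoundry):

import os
from typing import Optional

def resolve_run_name(cfg_run_name: Optional[str]) -> tuple[str, bool]:
    # Precedence: config value, then the RUN_NAME env var, then 'llm'.
    env_run_name = os.environ.get('RUN_NAME', None)
    run_name = (cfg_run_name if cfg_run_name else env_run_name) or 'llm'
    # Autoresume should only default on for a user-chosen name; the
    # shared 'llm' fallback could collide with another run's checkpoints.
    is_user_set = cfg_run_name is not None or env_run_name is not None
    return run_name, is_user_set

print(resolve_run_name(None))      # ('llm', False) when RUN_NAME is unset
print(resolve_run_name('my-run'))  # ('my-run', True)
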
scripts/inference/endpoint_generate.py (5 additions, 1 deletion)

@@ -25,7 +25,11 @@

 from llmfoundry.utils import prompt_files as utils

-logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
+logging.basicConfig(
+    format=
+    f'%(asctime)s: [%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+    force=True,
+)
 log = logging.getLogger(__name__)

 ENDPOINT_API_KEY_ENV: str = 'ENDPOINT_API_KEY'
scripts/misc/download_model.py (3 additions, 2 deletions)

@@ -31,8 +31,9 @@
 HF_TOKEN_ENV_VAR = 'HF_TOKEN'

 logging.basicConfig(
-    format=f'%(asctime)s: %(levelname)s: %(name)s: %(message)s',
-    level=logging.INFO,
+    format=
+    f'%(asctime)s: [%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
+    force=True,
 )
 log = logging.getLogger(__name__)

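One side effect in these two scripts: the old basicConfig calls passed level=logging.INFO, and the replacements do not, so the root logger falls back to Python's default WARNING threshold unless a level is set elsewhere. A quick standalone check (generic Python, not repo code):

import logging

logging.basicConfig(
    format='%(asctime)s: [%(process)d][%(threadName)s]: '
    '%(levelname)s: %(name)s: %(message)s',
    force=True,
)
log = logging.getLogger(__name__)

log.info('dropped: root logger still defaults to WARNING')
log.warning('printed: WARNING clears the default threshold')

logging.getLogger().setLevel(logging.INFO)  # restore INFO visibility
log.info('printed now')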
