Commit: refactor ray integration, support save ckpt
Showing 18 changed files with 215 additions and 161 deletions.
@@ -0,0 +1,48 @@ (new file: example YAML training config)
### model
model_name_or_path: meta-llama/Meta-Llama-3-8B-Instruct  # or use local absolute path
trust_remote_code: true

### method
stage: sft
do_train: true
finetuning_type: lora
lora_target: all

### dataset
dataset: identity,alpaca_en_demo
dataset_dir: REMOTE:llamafactory/demo_data  # or use local absolute path
template: llama3
cutoff_len: 2048
max_samples: 1000
overwrite_cache: true
preprocessing_num_workers: 16

### output
output_dir: tmp_dir
logging_steps: 10
save_steps: 500
plot_loss: true
overwrite_output_dir: true

### train
per_device_train_batch_size: 1
gradient_accumulation_steps: 8
learning_rate: 1.0e-4
num_train_epochs: 3.0
lr_scheduler_type: cosine
warmup_ratio: 0.1
bf16: true
ddp_timeout: 180000000

### eval
val_size: 0.1
per_device_eval_batch_size: 1
eval_strategy: steps
eval_steps: 500

### ray
ray_run_name: llama3_8b_sft_lora
ray_num_workers: 4  # number of GPUs to use
resources_per_worker:
  GPU: 1
placement_strategy: PACK
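For concreteness, here is a minimal sketch of reading the Ray-specific keys back out of such a config in Python. PyYAML, the local filename, and the key split are assumptions for illustration; only the field names themselves come from this commit.

```python
# Minimal sketch (assumptions: PyYAML is installed and the config above
# is saved locally as "llama3_lora_sft_ray.yaml", a hypothetical name).
import yaml

# the four Ray-specific keys introduced by this commit
RAY_KEYS = {"ray_run_name", "ray_num_workers", "resources_per_worker", "placement_strategy"}

with open("llama3_lora_sft_ray.yaml") as f:
    config = yaml.safe_load(f)

# split the flat config into Ray fields and everything else
ray_config = {k: v for k, v in config.items() if k in RAY_KEYS}
print(ray_config)
# {'ray_run_name': 'llama3_8b_sft_lora', 'ray_num_workers': 4,
#  'resources_per_worker': {'GPU': 1}, 'placement_strategy': 'PACK'}
```

Note that YAML hands `resources_per_worker` over as a parsed dict; the `RayArguments.__post_init__` in the file below also accepts a JSON-string form of the same field.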
@@ -0,0 +1,48 @@ (new file: Ray/training argument dataclasses)
import json
from dataclasses import dataclass, field
from typing import Literal, Optional, Union

from transformers import Seq2SeqTrainingArguments
from transformers.training_args import _convert_str_dict

from ..extras.misc import use_ray


@dataclass
class RayArguments:
    r"""
    Arguments pertaining to the Ray training.
    """

    ray_run_name: Optional[str] = field(
        default=None,
        metadata={"help": "The training results will be saved at `saves/ray_run_name`."},
    )
    ray_num_workers: int = field(
        default=1,
        metadata={"help": "The number of workers for Ray training. Default is 1 worker."},
    )
    resources_per_worker: Union[dict, str] = field(
        default_factory=lambda: {"GPU": 1},
        metadata={"help": "The resources per worker for Ray training. Default is to use 1 GPU per worker."},
    )
    placement_strategy: Literal["SPREAD", "PACK", "STRICT_SPREAD", "STRICT_PACK"] = field(
        default="PACK",
        metadata={"help": "The placement strategy for Ray training. Default is PACK."},
    )

    def __post_init__(self):
        self.use_ray = use_ray()
        if isinstance(self.resources_per_worker, str) and self.resources_per_worker.startswith("{"):
            self.resources_per_worker = _convert_str_dict(json.loads(self.resources_per_worker))


@dataclass
class TrainingArguments(RayArguments, Seq2SeqTrainingArguments):
    r"""
    Arguments pertaining to the trainer.
    """

    def __post_init__(self):
        Seq2SeqTrainingArguments.__post_init__(self)
        RayArguments.__post_init__(self)
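These fields map naturally onto Ray Train's scaling primitives. The sketch below shows one plausible translation; `build_scaling_config` and `build_run_config` are hypothetical helper names rather than functions from this commit, while `ScalingConfig` and `RunConfig` are real `ray.train` classes.

```python
# Hedged sketch: turn RayArguments into Ray Train configuration objects.
# The helper names are hypothetical; ScalingConfig and RunConfig are the
# actual ray.train APIs.
import os

from ray.train import RunConfig, ScalingConfig


def build_scaling_config(args) -> ScalingConfig:
    return ScalingConfig(
        num_workers=args.ray_num_workers,  # e.g. 4 in the config above
        resources_per_worker=args.resources_per_worker,
        placement_strategy=args.placement_strategy,
        # request GPU workers only when the resource spec asks for GPUs
        use_gpu=bool(args.resources_per_worker.get("GPU", 0)),
    )


def build_run_config(args) -> RunConfig:
    # per the help text on ray_run_name, results land under saves/<name>
    return RunConfig(name=args.ray_run_name, storage_path=os.path.abspath("saves"))
```

Two details of the dataclass are worth noting. First, `__post_init__` normalizes `resources_per_worker`: YAML yields a dict directly, but a command-line value arrives as a JSON string such as `'{"GPU": 1}'`, which is converted back with `json.loads` plus transformers' `_convert_str_dict`. Second, `use_ray()` from `extras.misc` (presumably an environment-flag check) is stashed on the instance, so the same `TrainingArguments` class serves both Ray and non-Ray runs.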
Two of the added files are empty (no content to show).