Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

patch the modeling files for remote code models #1872

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions examples/deepseek-v2/fft-fsdp-16b.yaml
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,67 @@
base_model: deepseek-ai/DeepSeek-V2-Lite
trust_remote_code: true

load_in_8bit: false
load_in_4bit: false
strict: false

datasets:
- path: tatsu-lab/alpaca
type: alpaca
dataset_prepared_path: last_run_prepared
val_set_size: 0.0
output_dir: ./outputs/out

sequence_len: 2048
sample_packing: true
pad_to_sequence_len: true

wandb_project:
wandb_entity:
wandb_watch:
wandb_name:
wandb_log_model:

gradient_accumulation_steps: 8
micro_batch_size: 1
num_epochs: 1
optimizer: adamw_torch
lr_scheduler: cosine
learning_rate: 2e-5

train_on_inputs: false
group_by_length: false
bf16: auto
fp16:
tf32: false

gradient_checkpointing: true
gradient_checkpointing_kwargs:
use_reentrant: false
early_stopping_patience:
resume_from_checkpoint:
logging_steps: 1
xformers_attention:
flash_attention: true

warmup_steps: 100
evals_per_epoch: 2
eval_table_size:
saves_per_epoch: 1
debug:
deepspeed:
weight_decay: 0.0
special_tokens:
fsdp:
- full_shard
- auto_wrap
fsdp_config:
fsdp_limit_all_gathers: true
fsdp_sync_module_states: true
fsdp_offload_params: true
fsdp_use_orig_params: false
fsdp_cpu_ram_efficient_loading: true
fsdp_auto_wrap_policy: TRANSFORMER_BASED_WRAP
fsdp_transformer_layer_cls_to_wrap: DeepseekV2DecoderLayer
fsdp_state_dict_type: FULL_STATE_DICT
fsdp_sharding_strategy: FULL_SHARD
28 changes: 25 additions & 3 deletions src/axolotl/monkeypatch/multipack.py
Original file line number | Diff line number | Diff line change
@@ -1,13 +1,18 @@
"""multipack patching for v2 of sample packing"""
import importlib
import sys
from pathlib import Path

import transformers
from accelerate import init_empty_weights
from accelerate import PartialState, init_empty_weights
from transformers import AutoConfig, AutoModelForCausalLM
from transformers.dynamic_module_utils import get_cached_module_file
from transformers.integrations import is_deepspeed_zero3_enabled
from transformers.utils import HF_MODULES_CACHE

from axolotl.monkeypatch.mixtral import patch_mixtral_moe_forward_zero3
from axolotl.monkeypatch.utils import get_unpad_data
from axolotl.utils.distributed import zero_only

SUPPORTED_MULTIPACK_MODEL_TYPES = [
"llama",
Expand Down Expand Up @@ -91,6 +96,23 @@ def patch_remote(model_name, config_name, modeling_name):
# we need to load the model here in order for modeling_* to be available
with init_empty_weights():
AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
module_file = get_cached_module_file(model_name, modeling_name.lstrip(".") + ".py")
with zero_only():
# read the file and see if it has been patched, look for the string "axolotl.monkeypatch.utils" in the contents
patched = False
with open(Path(HF_MODULES_CACHE) / module_file, "r", encoding="utf-8") as fin:
contents = fin.read()
if "axolotl.monkeypatch.utils" in contents:
patched = True
if not patched:
with open(
Path(HF_MODULES_CACHE) / module_file, "a", encoding="utf-8"
) as fout:
fout.write(
"\nfrom axolotl.monkeypatch.utils import get_unpad_data as _get_unpad_data\n"
)
PartialState().wait_for_everyone()
module_name = model_config.__class__.__module__.replace(config_name, modeling_name)
modeling_arch = importlib.import_module(module_name)
modeling_arch._get_unpad_data = get_unpad_data # pylint: disable=protected-access
if module_name in sys.modules:
del sys.modules[module_name]
importlib.import_module(module_name)
Loading