
Commit 96855de

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jan 18, 2024
1 parent 92bb4cd commit 96855de
Showing 1 changed file with 6 additions and 4 deletions.
tests/integration_tests/test_trainer.py (6 additions, 4 deletions)
@@ -1,13 +1,13 @@
 import logging
 import os
-import yaml
 import shutil
 from unittest import mock
 
 import numpy as np
 import pandas as pd
 import pytest
 import torch
+import yaml
 from packaging.version import parse as parse_version
 
 from ludwig.api import LudwigModel
@@ -420,9 +420,10 @@ def test_enable_gradient_checkpointing(tmpdir, caplog):
     # but does not prevent training from starting.
     assert "Gradient checkpointing is currently only supported for model_type: llm. Skipping..." in caplog.text
 
+
 def test_llm_batch_size_tuning():
     config = yaml.safe_load(
-        '''
+        """
 model_type: llm
 input_features:
 - name: instruction
@@ -467,9 +468,10 @@ def test_llm_batch_size_tuning():
 type: local
 base_model: HuggingFaceH4/tiny-random-LlamaForCausalLM
 ludwig_version: 0.9.dev
-''')
+"""
+    )
     model = LudwigModel(config=config)
     model = LudwigModel.create_model(model.config_obj)
     trainer = FineTuneTrainer(model.config_obj.trainer, model)
     evaluator = trainer._create_batch_size_evaluator()
-    print(evaluator.input_feature_name) #TEST IS NOT DONE YET
+    print(evaluator.input_feature_name) # TEST IS NOT DONE YET
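
For reference, the net effect of the first hunk is simply that the yaml import moves from the standard-library group into the third-party group. A minimal sketch of the resulting import block, reconstructed only from the context lines shown in that hunk (no other imports are assumed):

import logging
import os
import shutil
from unittest import mock

import numpy as np
import pandas as pd
import pytest
import torch
import yaml  # moved here from the standard-library group by the hook
from packaging.version import parse as parse_version

from ludwig.api import LudwigModel

The other two hunks are pure formatting: the ''' delimiters around the YAML config string become """, the closing delimiter and the call's closing parenthesis are split onto separate lines, a second blank line is added before the new test function, and the trailing comment gains a space after #. These look like the normalizations applied by formatter hooks such as black and isort, but the repository's .pre-commit-config.yaml is not shown on this page, so the exact hook list is an assumption.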
