Skip to content

Commit

Permalink
logging output fix
Browse files — browse the repository at this point in the history
  • Loading branch information
horheynm committed Nov 15, 2024
1 parent 9418de1 commit 5bc9a25
Showing 1 changed file with 11 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import math
import shutil

Expand Down Expand Up @@ -62,16 +63,26 @@ def test_sparse_model_reload(compressed, config, dtype, tmp_path):
clear_sparse_session=False,
)

# temporarily set the log level to error, to ignore printing out long missing
# and unexpected key error messages (these are EXPECTED for quantized models)
transformers_logger = logging.getLogger("transformers.modeling_utils")
restore_log_level = transformers_logger.getEffectiveLevel()
transformers_logger.setLevel(level=logging.ERROR)

model = AutoModelForCausalLM.from_pretrained(
tmp_path / "oneshot_out", torch_dtype=dtype
)

# restore transformers logging level now that model shell is loaded
transformers_logger.setLevel(level=restore_log_level)

# assert that sample layer has the intended sparsity
assert math.isclose(
tensor_sparsity(model.state_dict()[one_of_sparse_weights]),
expected_sparsity,
rel_tol=1e-3,
)

inferred_structure = SparsityConfigMetadata.infer_sparsity_structure()
assert inferred_structure == "0:0"

Expand Down

0 comments on commit 5bc9a25

Please sign in to comment.