8 changes: 3 additions & 5 deletions tests/models/glm4v_moe/test_modeling_glm4v_moe.py
@@ -297,6 +297,7 @@ def test_inputs_embeds_matches_input_ids(self):


 @require_torch
+@slow
 class Glm4vMoeIntegrationTest(unittest.TestCase):
     model = None

@@ -310,7 +311,8 @@ def get_model(cls):

     @classmethod
     def tearDownClass(cls):
-        del cls.model
+        if hasattr(cls, "model"):
+            del cls.model
Comment on lines +314 to +315

Collaborator:
It's quite strange that the issue only happens on CircleCI, and in this case, only when a non-integration test is run as well.

For example

python3 -m pytest -v tests/models/glm4v_moe/test_modeling_glm4v_moe.py -k "Glm4vMoeIntegrationTest"

doesn't produce the error, but

python3 -m pytest -v tests/models/glm4v_moe/test_modeling_glm4v_moe.py -k "Glm4vMoeIntegrationTest or test_attention_outputs"

does.

On the daily CI runners, the second command doesn't produce the error either.

Not sure what is going on with the CircleCI environment, but yeah, let's keep this.

Member Author:

I think it's just a matter of how pytest collects the different tests. In the second case, it probably creates the class, but since all of its tests are slow it doesn't collect any of them and destroys the class -> `model` was never created, so `del cls.model` crashes. With the `slow` decorator on the class itself, I think the class would never be collected at all, so this check shouldn't be needed, but I put it there just to be safe. In the first case, it probably doesn't create the class before checking the tests, for some reason. On the daily runners, slow tests are not skipped, so they run, `model` always exists, and it never crashes.
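For illustration, here is a minimal self-contained sketch of the pattern this diff lands on; the `slow` stand-in and the class name below are hypothetical, not taken from the actual test file:

```python
import os
import unittest

# Stand-in for transformers' `slow` decorator: skip unless RUN_SLOW is set.
slow = unittest.skipUnless(os.environ.get("RUN_SLOW"), "test is slow")


@slow  # class-level: every test in the class is skipped when RUN_SLOW is unset
class ExampleIntegrationTest(unittest.TestCase):
    model = None  # populated lazily by the first test that needs the model

    @classmethod
    def tearDownClass(cls):
        # Defensive guard: if pytest created the class but skipped every test,
        # the model attribute may never have been set, and an unguarded
        # `del cls.model` would raise AttributeError during teardown.
        if hasattr(cls, "model"):
            del cls.model
```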

         cleanup(torch_device, gc_collect=True)

     def setUp(self):
@@ -364,7 +366,6 @@ def setUp(self):
     def tearDown(self):
         cleanup(torch_device, gc_collect=True)

-    @slow
     def test_small_model_integration_test(self):
         inputs = self.processor.apply_chat_template(
             self.message, tokenize=True, add_generation_prompt=True, return_dict=True, return_tensors="pt"
@@ -386,7 +387,6 @@ def test_small_model_integration_test(self):
         )
         torch.testing.assert_close(expected_pixel_slice, inputs.pixel_values[:6, :3], atol=1e-4, rtol=1e-4)

-    @slow
     def test_small_model_integration_test_batch(self):
         model = self.get_model()
         batch_messages = [self.message, self.message2, self.message_wo_image]
@@ -414,7 +414,6 @@ def test_small_model_integration_test_batch(self):
             EXPECTED_DECODED_TEXT,
         )

-    @slow
     def test_small_model_integration_test_with_video(self):
         processor = AutoProcessor.from_pretrained("zai-org/GLM-4.5V", max_image_size={"longest_edge": 50176})
         model = self.get_model()
@@ -437,7 +436,6 @@ def test_small_model_integration_test_with_video(self):
         )

     @run_first
-    @slow
     @require_flash_attn
     @require_torch_gpu
     def test_small_model_integration_test_batch_flashatt2(self):