feat(server): reduce memory requirement #214

Merged: 4 commits, Apr 24, 2023
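Every test change in this PR follows the same pattern: values that used to be read back from a source batch after filter or concatenate are now copied or cloned beforehand. That fits the memory-reduction goal in the title: once batches are concatenated (or requests filtered out), the source batches apparently no longer keep their own past_key_values (and, for seq2seq, encoder_last_hidden_state), so the tests must snapshot those tensors to compare against. The sketch below is a simplified, hypothetical illustration of that idea, not the actual text-generation-inference implementation; the Batch dataclass and concatenate helper are invented for the example and ignore the padding and sequence-length alignment the real code has to handle.

# Hypothetical, simplified sketch (invented names, not the actual
# text-generation-inference code): merge the per-layer KV caches of several
# batches and release each source tensor as soon as it has been copied,
# so peak memory stays close to one full cache instead of two.
from dataclasses import dataclass
from typing import List, Optional, Tuple

import torch

KV = Tuple[torch.Tensor, torch.Tensor]  # (key, value) for one layer


@dataclass
class Batch:
    past_key_values: Optional[List[Optional[KV]]]


def concatenate(batches: List[Batch]) -> Batch:
    num_layers = len(batches[0].past_key_values)
    merged: List[KV] = []
    for layer in range(num_layers):
        # Assumes batch-first tensors with matching non-batch dimensions.
        keys = torch.cat([b.past_key_values[layer][0] for b in batches], dim=0)
        values = torch.cat([b.past_key_values[layer][1] for b in batches], dim=0)
        merged.append((keys, values))
        # Drop the source tensors for this layer right away; only one extra
        # layer of cache is ever alive at the same time.
        for b in batches:
            b.past_key_values[layer] = None
    for b in batches:
        b.past_key_values = None  # the source batches no longer own a cache
    return Batch(past_key_values=merged)

With a concatenate shaped like this, any caller that still needs the original caches afterwards has to clone them first, which is exactly what the updated assertions in the three test files below do.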
server/tests/models/test_bloom.py (23 changes: 17 additions & 6 deletions)
@@ -175,12 +175,14 @@ def test_causal_lm_generate_token_completion_multi(
        generations[1].generated_text.generated_tokens
        == default_multi_requests_bloom_batch.stopping_criterias[1].max_new_tokens
    )
+    # Copy stopping_criterias before filtering
+    stopping_criterias = default_multi_requests_bloom_batch.stopping_criterias.copy()

    next_batch = next_batch.filter([next_batch.requests[0]])

    for _ in range(
-        default_multi_requests_bloom_batch.stopping_criterias[0].max_new_tokens
-        - default_multi_requests_bloom_batch.stopping_criterias[1].max_new_tokens
+        stopping_criterias[0].max_new_tokens
+        - stopping_criterias[1].max_new_tokens
        - 1
    ):
        generations, next_batch = default_bloom.generate_token(next_batch)
@@ -212,6 +214,15 @@ def test_batch_concatenate(
    next_batch_1 = default_multi_requests_bloom_batch
    _, next_batch_1 = default_bloom.generate_token(next_batch_1)

+    # Clone past_key_values before concatenating to compare after,
+    # because they are removed from the concatenated batches
+    next_batch_0_past_key_values = [
+        (k.clone(), v.clone()) for (k, v) in next_batch_0.past_key_values
+    ]
+    next_batch_1_past_key_values = [
+        (k.clone(), v.clone()) for (k, v) in next_batch_1.past_key_values
+    ]
+
    next_batch = BloomCausalLMBatch.concatenate([next_batch_0, next_batch_1])

    assert torch.equal(next_batch.all_input_ids[0], next_batch_0.all_input_ids[0])
@@ -246,15 +257,15 @@ def test_batch_concatenate(
    assert all([p[1].shape == (3, 16, 2, 64) for p in next_batch.past_key_values])

    for i, past in enumerate(next_batch.past_key_values):
-        assert torch.equal(next_batch_0.past_key_values[i][0][:, :, -2:], past[0][0])
+        assert torch.equal(next_batch_0_past_key_values[i][0][:, :, -2:], past[0][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][0][:, :, -1:],
+            next_batch_1_past_key_values[i][0][:, :, -1:],
            past[0][1:, :, :, -1].reshape(-1, 64, 1),
        )

-        assert torch.equal(next_batch_0.past_key_values[i][1][:, -2:, :], past[1][0])
+        assert torch.equal(next_batch_0_past_key_values[i][1][:, -2:, :], past[1][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][1][:, -1:, :],
+            next_batch_1_past_key_values[i][1][:, -1:, :],
            past[1][1:, :, -1, :].reshape(-1, 1, 64),
        )

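The first hunk above copies stopping_criterias before calling next_batch.filter(...) and then reads max_new_tokens from the copy rather than from the fixture batch. That is consistent with filter now pruning the batch's per-request state in place instead of leaving the original lists untouched. A minimal, hypothetical sketch of that behaviour (invented names, only the fields the test touches, not the real Batch.filter):

# Hypothetical sketch of an in-place filter: keeping only some requests
# mutates the batch's own lists, so callers that still need the original
# values must copy them first.
from dataclasses import dataclass
from typing import List


@dataclass
class StoppingCriteria:
    max_new_tokens: int


@dataclass
class Batch:
    requests: List[int]  # request ids stand in for real request objects
    stopping_criterias: List[StoppingCriteria]

    def filter(self, kept_requests: List[int]) -> "Batch":
        keep = [i for i, r in enumerate(self.requests) if r in kept_requests]
        # Rebuild the per-request lists in place instead of returning a copy.
        self.requests = [self.requests[i] for i in keep]
        self.stopping_criterias = [self.stopping_criterias[i] for i in keep]
        return self


batch = Batch(requests=[0, 1], stopping_criterias=[StoppingCriteria(20), StoppingCriteria(5)])
criterias = batch.stopping_criterias.copy()  # snapshot, as the updated tests do
batch.filter([0])
assert len(batch.stopping_criterias) == 1 and len(criterias) == 2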
server/tests/models/test_causal_lm.py (23 changes: 17 additions & 6 deletions)
@@ -173,12 +173,14 @@ def test_causal_lm_generate_token_completion_multi(
        generations[1].generated_text.generated_tokens
        == default_multi_requests_causal_lm_batch.stopping_criterias[1].max_new_tokens
    )
+    # Copy stopping_criterias before filtering
+    stopping_criterias = default_multi_requests_causal_lm_batch.stopping_criterias.copy()

    next_batch = next_batch.filter([next_batch.requests[0]])

    for _ in range(
-        default_multi_requests_causal_lm_batch.stopping_criterias[0].max_new_tokens
-        - default_multi_requests_causal_lm_batch.stopping_criterias[1].max_new_tokens
+        stopping_criterias[0].max_new_tokens
+        - stopping_criterias[1].max_new_tokens
        - 1
    ):
        generations, next_batch = default_causal_lm.generate_token(next_batch)
@@ -209,6 +211,15 @@ def test_batch_concatenate(
    next_batch_1 = default_multi_requests_causal_lm_batch
    _, next_batch_1 = default_causal_lm.generate_token(next_batch_1)

+    # Clone past_key_values before concatenating to compare after,
+    # because they are removed from the concatenated batches
+    next_batch_0_past_key_values = [
+        (k.clone(), v.clone()) for (k, v) in next_batch_0.past_key_values
+    ]
+    next_batch_1_past_key_values = [
+        (k.clone(), v.clone()) for (k, v) in next_batch_1.past_key_values
+    ]
+
    next_batch = CausalLMBatch.concatenate([next_batch_0, next_batch_1])

    assert torch.equal(next_batch.all_input_ids[0], next_batch_0.all_input_ids[0])
@@ -244,14 +255,14 @@ def test_batch_concatenate(
    assert all([p[1].shape == (3, 12, 2, 64) for p in next_batch.past_key_values])

    for i, past in enumerate(next_batch.past_key_values):
-        assert torch.equal(next_batch_0.past_key_values[i][0][0, :, -2:], past[0][0])
+        assert torch.equal(next_batch_0_past_key_values[i][0][0, :, -2:], past[0][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][0][:, :, -1:], past[0][1:, :, -1:, :]
+            next_batch_1_past_key_values[i][0][:, :, -1:], past[0][1:, :, -1:, :]
        )

-        assert torch.equal(next_batch_0.past_key_values[i][1][0, :, -2:], past[1][0])
+        assert torch.equal(next_batch_0_past_key_values[i][1][0, :, -2:], past[1][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][1][:, :, -1:], past[1][1:, :, -1:, :]
+            next_batch_1_past_key_values[i][1][:, :, -1:], past[1][1:, :, -1:, :]
        )

    for _ in range(
server/tests/models/test_seq2seq_lm.py (33 changes: 23 additions & 10 deletions)
@@ -219,6 +219,19 @@ def test_batch_concatenate(
    next_batch_1 = default_multi_requests_seq2seq_lm_batch
    _, next_batch_1 = default_seq2seq_lm.generate_token(next_batch_1)

+    # Copy hidden state because it is removed from the concatenated branches
+    next_batch_0_encoder_last_hidden_state = next_batch_0.encoder_last_hidden_state
+    next_batch_1_encoder_last_hidden_state = next_batch_1.encoder_last_hidden_state
+
+    # Clone past_key_values before concatenating to compare after,
+    # because they are removed from the concatenated batches
+    next_batch_0_past_key_values = [
+        [t.clone() for t in layer] for layer in next_batch_0.past_key_values
+    ]
+    next_batch_1_past_key_values = [
+        [t.clone() for t in layer] for layer in next_batch_1.past_key_values
+    ]
+
    next_batch = Seq2SeqLMBatch.concatenate([next_batch_0, next_batch_1])

    assert next_batch.batch_id == 0
@@ -239,11 +252,11 @@ def test_batch_concatenate(

    assert torch.equal(
        next_batch.encoder_last_hidden_state[0],
-        next_batch_0.encoder_last_hidden_state[0, -2:],
+        next_batch_0_encoder_last_hidden_state[0, -2:],
    )
    assert torch.equal(
        next_batch.encoder_last_hidden_state[1:],
-        next_batch_1.encoder_last_hidden_state[:, -2:],
+        next_batch_1_encoder_last_hidden_state[:, -2:],
    )

    assert next_batch.input_lengths == [2, 2, 2]
@@ -275,24 +288,24 @@ def test_batch_concatenate(
    )

    for i, past in enumerate(next_batch.past_key_values):
-        assert torch.equal(next_batch_0.past_key_values[i][0][0, :, -2:, :], past[0][0])
+        assert torch.equal(next_batch_0_past_key_values[i][0][0, :, -2:, :], past[0][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][0][:, :, -1:, :], past[0][1:, :, -1:, :]
+            next_batch_1_past_key_values[i][0][:, :, -1:, :], past[0][1:, :, -1:, :]
        )

-        assert torch.equal(next_batch_0.past_key_values[i][1][0, :, -2:, :], past[1][0])
+        assert torch.equal(next_batch_0_past_key_values[i][1][0, :, -2:, :], past[1][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][1][:, :, -1:, :], past[1][1:, :, -1:, :]
+            next_batch_1_past_key_values[i][1][:, :, -1:, :], past[1][1:, :, -1:, :]
        )

-        assert torch.equal(next_batch_0.past_key_values[i][2][0, :, -2:, :], past[2][0])
+        assert torch.equal(next_batch_0_past_key_values[i][2][0, :, -2:, :], past[2][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][2][:, :, -2:, :], past[2][1:]
+            next_batch_1_past_key_values[i][2][:, :, -2:, :], past[2][1:]
        )

-        assert torch.equal(next_batch_0.past_key_values[i][3][0, :, -2:, :], past[3][0])
+        assert torch.equal(next_batch_0_past_key_values[i][3][0, :, -2:, :], past[3][0])
        assert torch.equal(
-            next_batch_1.past_key_values[i][3][:, :, -2:, :], past[3][1:]
+            next_batch_1_past_key_values[i][3][:, :, -2:, :], past[3][1:]
        )

    for _ in range(3):
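The seq2seq test differs from the causal LM tests in two ways: it also snapshots encoder_last_hidden_state, and it clones every tensor in each layer's past rather than a single (key, value) pair. A plausible reading of the indexing, not stated explicitly in this diff: a decoder-only layer caches two tensors, while a seq2seq layer caches four (decoder self-attention key/value plus cross-attention key/value over the fixed-length encoder output), which would explain why past[0] and past[1] are sliced at -1: (they grow with each generated token) while past[2] and past[3] are sliced at -2: (the encoder length stays at 2). Illustration only, with invented shapes:

# Illustration of the two clone patterns used by the tests (shapes invented).
import torch

# Causal LM layer cache: one (key, value) pair, cloned pairwise.
causal_layer = (torch.zeros(2, 4, 3, 8), torch.zeros(2, 4, 3, 8))
causal_copy = (causal_layer[0].clone(), causal_layer[1].clone())

# Seq2seq layer cache: four tensors per layer, assumed to be
# (self-attn key, self-attn value, cross-attn key, cross-attn value),
# cloned with a nested comprehension as in test_seq2seq_lm.py.
seq2seq_layer = [torch.zeros(2, 4, 3, 8) for _ in range(4)]
seq2seq_copy = [t.clone() for t in seq2seq_layer]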